blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f36d02da184f8af38501b9b4024f49f2818710a7
|
17104ec32410aa75638b6264f8ec7621d4722f13
|
/settings.py
|
4267c87782fb790f392d0efbf3a1c4da4f305697
|
[] |
no_license
|
kdudkov/modbus_server
|
fd7ee367658f5974cf2366f73c4649fd51ad4719
|
28376345d71f095f482db5237f533f86f2aedfd8
|
refs/heads/master
| 2021-06-04T04:12:36.631039
| 2018-02-04T19:55:59
| 2018-02-04T19:55:59
| 56,672,185
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 120
|
py
|
# Local configuration for the Modbus server.
from translators import SimpleChiniseBoard
# Serial device used for the Modbus RTU link.
modbus_serial_port = '/dev/rs485'
# Translator instances to run; the argument is presumably the Modbus unit
# (slave) address — confirm against the translators module.
translators = [SimpleChiniseBoard(1), ]
|
[
"kdudkov@ya.ru"
] |
kdudkov@ya.ru
|
2e0178ccf2d9b0262a0c5b3da2ca9630a3ca4b9f
|
7cc44140110528c3eb3b0ef141d85500d506c390
|
/projets/CHALLET GITENAIT/projet5.py
|
5a4ca0c99ce249284cea501f2a5503d5a9682dab
|
[] |
no_license
|
cedric-lemery/ISN2020
|
3d15568dfde4cafe03a5cd994957e42ac0018fef
|
a814dd91d0ecacfbcf5af16e4c0001d6e20b3e24
|
refs/heads/master
| 2021-03-29T04:23:42.424393
| 2020-06-14T17:42:55
| 2020-06-14T17:42:55
| 247,919,858
| 1
| 4
| null | 2020-06-14T17:31:23
| 2020-03-17T08:44:29
|
Python
|
UTF-8
|
Python
| false
| false
| 4,019
|
py
|
#tristan
#audric
#Flappy bird
import pygame
import traceback
from random import randint  # NOTE(review): unused in the visible code
import time  # NOTE(review): unused in the visible code
from pygame.locals import *
pygame.init()
# Frame clock used to pace the game-over wait loop.
temps = pygame.time.Clock()
# Window dimensions (pixels).
largeur= 1200
hauteur= 600
fenetre= pygame.display.set_mode((largeur,hauteur))
pygame.display.set_caption("Flappy Bird")
# Background image, scaled to fill the window.
fond=pygame.image.load("fond.png").convert()
fond=pygame.transform.scale(fond,(largeur,hauteur))
def msgfin():
    """Draw the 'Game over' banner onto the game window."""
    libelle = "Game over"
    police = pygame.font.SysFont("Lobster", 250, bold=False, italic=False)
    rendu = police.render(libelle, 1, (255, 100, 100))
    fenetre.blit(rendu, (100, 110))
def msgrejouer():
    """Draw the replay instructions below the game-over banner."""
    libelle = "Pour rejouer appuyer sur 'r'"
    police = pygame.font.SysFont("Lobster", 100, bold=False, italic=False)
    rendu = police.render(libelle, 1, (255, 180, 100))
    fenetre.blit(rendu, (100, 400))
def gameover():
    """Show the game-over screen, wait for 'r', then restart the game."""
    msgfin()
    msgrejouer()
    pygame.display.flip()
    while choix() is None:
        # BUG FIX: the original had bare `temps.tick` (an attribute access,
        # i.e. a no-op), so this wait loop busy-spun the CPU. Calling
        # tick(30) caps the polling loop at 30 iterations per second.
        temps.tick(30)
    # NOTE(review): restarting via a recursive call grows the stack a little
    # on every game over; acceptable for a toy game, but worth refactoring.
    jeux()
def choix():
    """Poll pending events.

    Quits the program on window close; returns the key code when 'r' is
    pressed, otherwise None.
    """
    for evenement in pygame.event.get():
        if evenement.type == QUIT:
            pygame.quit()
            exit()
        if evenement.type == KEYDOWN and evenement.key == K_r:
            return evenement.key
    return None
def jeux():
    """Run one round of Flappy Bird until the player loses or quits.

    Loads the sprites, then runs the frame loop: SPACE makes the bird jump
    150px, gravity pulls it down 1px per frame, and both pipe pairs scroll
    left 1px per frame. Touching a pipe or a screen edge ends the round.
    """
    y = 0  # NOTE(review): unused below; looks like leftover state
    perso = pygame.image.load("perso.png").convert_alpha()
    perso.set_colorkey((255, 255, 255))
    perso = pygame.transform.scale(perso, (80, 80))
    persorect = perso.get_rect()
    persorect = persorect.move(100, 100)
    # Two pipe pairs — (tuyau0, tuyau1) and (tuyau3, tuyau4) — spawned half
    # a screen apart so one pair is always on screen.
    tuyau0 = pygame.image.load("tuyau0.png").convert_alpha()
    tuyau0rect = tuyau0.get_rect()
    tuyau0rect = tuyau0rect.move(600, 500)
    tuyau1 = pygame.image.load("tuyau1.png").convert_alpha()
    tuyau1rect = tuyau1.get_rect()
    tuyau1rect = tuyau1rect.move(600, -170)
    tuyau3 = pygame.image.load("tuyau0.png").convert_alpha()
    tuyau3rect = tuyau3.get_rect()
    tuyau3rect = tuyau3rect.move(1200, 500)
    tuyau4 = pygame.image.load("tuyau1.png").convert_alpha()
    tuyau4rect = tuyau4.get_rect()
    tuyau4rect = tuyau4rect.move(1200, -170)
    collisions = [tuyau0rect, tuyau1rect, tuyau3rect, tuyau4rect]
    # Initial frame.
    fenetre.blit(fond, (0, 0))
    fenetre.blit(perso, persorect)
    fenetre.blit(tuyau0, tuyau0rect)
    fenetre.blit(tuyau1, tuyau1rect)
    fenetre.blit(tuyau3, tuyau3rect)
    fenetre.blit(tuyau4, tuyau4rect)
    pygame.display.flip()
    continuer = 1
    while continuer:
        for event in pygame.event.get():
            if event.type == QUIT:
                continuer = 0
            if event.type == KEYDOWN:
                if event.key == K_SPACE:
                    persorect = persorect.move(0, -150)
        # Gravity and pipe scrolling, one pixel per frame.
        persorect = persorect.move(0, 1)
        tuyau0rect = tuyau0rect.move(-1, 0)
        tuyau1rect = tuyau1rect.move(-1, 0)
        tuyau3rect = tuyau3rect.move(-1, 0)
        tuyau4rect = tuyau4rect.move(-1, 0)
        # Rect.move returns a new rect, so the collision list must be rebuilt.
        collisions = [tuyau0rect, tuyau1rect, tuyau3rect, tuyau4rect]
        fenetre.blit(fond, (0, 0))
        fenetre.blit(perso, persorect)
        fenetre.blit(tuyau0, tuyau0rect)
        fenetre.blit(tuyau1, tuyau1rect)
        fenetre.blit(tuyau3, tuyau3rect)
        fenetre.blit(tuyau4, tuyau4rect)
        # CLEANUP: a single collidelist call replaces the original four
        # identical calls that tested indices 0..3 one by one (-1 = no hit).
        if persorect.collidelist(collisions) != -1:
            gameover()
        # Losing conditions against the screen edges.
        if persorect.bottom > 600 or persorect.top < 0 or persorect.right == 1200:
            gameover()
        pygame.display.flip()
        # Recycle a pipe pair back to the right edge once it scrolls off.
        if tuyau0rect.right == 0:
            tuyau0rect = tuyau0rect.move(1200, 0)
            tuyau1rect = tuyau1rect.move(1200, 0)
        if tuyau3rect.right == 0:
            tuyau3rect = tuyau3rect.move(1200, 0)
            tuyau4rect = tuyau4rect.move(1200, 0)
# Run the game; always shut pygame down cleanly on the way out.
try:
    jeux()
# BUG FIX: the original bare `except:` also caught SystemExit (raised by
# exit() inside choix()) and printed a spurious traceback for a normal quit.
except Exception:
    traceback.print_exc()
finally:
    pygame.quit()
    exit()
|
[
"32592390+cedric-lemery@users.noreply.github.com"
] |
32592390+cedric-lemery@users.noreply.github.com
|
0569a217747c9a5c1fadbd6e98e8480130bbee8d
|
bd5b3934969ebf4f693ceb4be17a68f9c3ebd414
|
/beginPython/fluentPython/ch08/copytest1.py
|
ba170cc4288d8b197b1b26738b0672ae5925b8f2
|
[] |
no_license
|
lyk4411/untitled
|
bc46863d3bbb2b71edf13947f24b892c2cf43e1a
|
875b7dfa765ffa40d76582d2ae41813d2e15c8bd
|
refs/heads/master
| 2021-04-06T09:09:08.977227
| 2021-03-10T02:56:34
| 2021-03-10T02:56:34
| 124,990,530
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 157
|
py
|
# Demonstration of copy.deepcopy on a cyclic (self-referencing) structure.
a = [10, 20]
b = [a, 30]   # b's first element is a
a.append(b)   # a now contains b, which contains a -> reference cycle
print(a)      # Python prints the cycle as [10, 20, [[...], 30]]
from copy import deepcopy
c = deepcopy(a)  # deepcopy handles the cycle via its internal memo dict
print(c)
print(c[2])       # the copied b
print(c[2][0])    # the copied a — this is c itself
print(c[2][0][2]) # back to the copied b again
d = c[2][0][2]
|
[
"moneyflying_2006@hotmail.com"
] |
moneyflying_2006@hotmail.com
|
6e898979b150fb54a4595eb314dab1b451458b57
|
2f48a4adb131621c7ad4d772af67b2c40f2f41d7
|
/Dev/cfehome/src/cfehome/settings/local.py
|
5b2998780c13424b1ac9087a958d349feb9b9ee3
|
[] |
no_license
|
akashbijwe/Python
|
ad0f6ae36e41e08a4ed24bfdcefde3287cefeb14
|
17948ece6835e240584d6835cce66eb75c4f1b22
|
refs/heads/master
| 2022-11-02T00:13:19.858003
| 2018-07-27T11:45:13
| 2018-07-27T11:45:13
| 141,707,337
| 2
| 1
| null | 2022-10-21T13:18:59
| 2018-07-20T12:05:30
|
Python
|
UTF-8
|
Python
| false
| false
| 3,116
|
py
|
"""
Django settings for cfehome project.
Generated by 'django-admin startproject' using Django 1.11.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'd3z%j57_y)5v*#2g314f(i0%&iy^*eo6)ls9_itfj4xi4kn@t_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cfehome.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cfehome.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"mr.akashbijwe@gmail.com"
] |
mr.akashbijwe@gmail.com
|
afe4f7e00298821574b0f0210c6711e31cc6fca0
|
d0a358ca8fe4f2bf8743354bb8ab5ceab656bdc7
|
/alignment/wham/models.py
|
bdaae23a78d1d6563fb1a044370eb3ad3c3dbfbe
|
[] |
no_license
|
winobes/lasn
|
910b8bcd940dddee143e50264c4491e3092b1edc
|
ebef896ea64aa30c3be9e6f445dd09a235e15696
|
refs/heads/master
| 2021-01-24T08:20:05.138210
| 2019-04-13T17:03:04
| 2019-04-13T17:03:04
| 69,143,253
| 4
| 1
| null | 2017-11-17T22:49:29
| 2016-09-25T04:15:49
|
Python
|
UTF-8
|
Python
| false
| false
| 1,577
|
py
|
import pandas as pd
import pystan
def fit_wham_nosubpops(reply_pairs, markers, sample=None):
    """Fit the WHAM linguistic-alignment model (no subpopulations) in Stan.

    Args:
        reply_pairs: DataFrame with columns 'user_a', 'user_b',
            'utterance_id_b' and, for each marker m, columns m+'_a' and
            m+'_b' holding boolean marker usage for speakers a and b
            — inferred from the indexing below; confirm against callers.
            NOTE: mutated in place (marker tuple columns are added).
        markers: list of marker column names.
        sample: optional number of rows to subsample before fitting.

    NOTE(review): the fitted model is never returned — the function only
    prints the row count and the elapsed sampling time. Presumably `fit`
    should be returned; confirm intent.
    """
    # merge the marker usage columns for the reply pair
    for m in markers:
        reply_pairs[m] = list(zip(reply_pairs[m+'_a'], reply_pairs[m+'_b']))
    # reshape: one row per (reply pair, marker) with a (used_a, used_b) tuple
    df = pd.melt(reply_pairs, id_vars = ['user_a', 'user_b', 'utterance_id_b'], value_vars=markers, var_name='marker')
    # change the marker labels to indices Stan will like (1-based)
    marker_idx = {m:i+1 for i,m in enumerate(markers)}
    df['marker'] = df['marker'].apply(lambda x: marker_idx[x])
    # reshape again: count occurrences of each (used_a, used_b) combination
    # per (user_a, user_b, marker) — yields columns keyed by bool tuples
    df = df.pivot_table(index=['user_a', 'user_b', 'marker'], columns='value', aggfunc='size', fill_value=0)
    df = df.reset_index()
    if sample:
        df = df.sample(sample)
    print(len(df))
    # Stan data block: counts conditioned on whether speaker a used the marker
    data = {
        "NumMarkers": len(markers),
        "NumObservations": len(df),
        "MarkerType": df.marker.values,
        "NumUtterancesAB": (df[(True, True)] + df[(True, False)]).values,
        "NumUtterancesNotAB": (df[(False, True)] + df[(False, False)]).values,
        "CountsAB": df[(True, True)].values,
        "CountsNotAB": df[(False, True)].values,
        "StdDev": .25
    }
    ## Compile the Stan model
    sm = pystan.StanModel(file='alignment.cauchy.nosubpop.stan', verbose=True)
    ## Sample // fit the model to the data
    import time
    start = time.time()
    fit = sm.sampling(data=data, iter=200, pars=['eta_ab_pop'], chains=2)
    end = time.time()
    print(end - start)
def fit_wham(reply_pairs, subpops_column, markers, sample=None):
    """Fit the full WHAM model with subpopulations. Not implemented yet."""
    pass
|
[
"winobes@gmail.com"
] |
winobes@gmail.com
|
970f789de95468e7ab88ed4a5cb7f0c5f50a547c
|
24a4bf21aa59350b016853349b0dfaa8c971516a
|
/YounJongSu/mid_hw/matrix_chain_multiplication.py
|
d18e61ef6538df4117430b1067f539e7ddd1981e
|
[] |
no_license
|
oereo/Algorithm_thursday
|
3f73791c10496d577dec3a6e424fc0dfae5f42c0
|
6850d7a4385bd33166c06ed4228428f376327e4e
|
refs/heads/master
| 2022-09-11T16:37:03.108128
| 2020-05-30T12:12:35
| 2020-05-30T12:12:35
| 263,600,788
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 489
|
py
|
import sys
def MatrixChainOrder(p, n):
    """Return the minimum number of scalar multiplications needed to
    multiply a chain of n-1 matrices, where matrix i has dimensions
    p[i-1] x p[i] (p has length n).

    Classic O(n^3) dynamic program: m[i][j] holds the cheapest cost of
    computing the product of matrices i..j (1-indexed; row/column 0 unused).
    """
    m = [[0 for _ in range(n)] for _ in range(n)]
    # L is the chain length currently being solved.
    for L in range(2, n):
        for i in range(1, n - L + 1):
            j = i + L - 1
            m[i][j] = sys.maxsize
            # Try every split point k between i and j.
            for k in range(i, j):
                q = m[i][k] + m[k + 1][j] + p[i - 1] * p[k] * p[j]
                if q < m[i][j]:
                    m[i][j] = q
    # BUG FIX: removed the leftover debug `print(m)` that dumped the whole
    # DP table on every iteration of the outer loop.
    return m[1][n - 1]
# Demo: minimum cost of multiplying matrices of sizes 1x2, 2x3 and 3x4.
arr = [1, 2, 3, 4]
size = len(arr)
print(MatrixChainOrder(arr, size))
|
[
"dbs106@gmail.com"
] |
dbs106@gmail.com
|
b3d768c3ef0a4d459076b7697959c0d53bdb2fa1
|
17b1249a8d7868ce0f56e7b539308e571bcaf888
|
/conftest.py
|
a746df4229b24bbc82707f61853dcfce7b54425c
|
[] |
no_license
|
UyliaGor/lesson_3.6_step9
|
4041b586de9e3b936ad132abdde7ebd193724c89
|
fbd16002cbf5527285aa98650ed0c3049c1c885f
|
refs/heads/main
| 2023-07-09T00:49:04.445614
| 2021-07-22T09:35:22
| 2021-07-22T09:35:22
| 388,182,214
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,297
|
py
|
import pytest
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import NoSuchElementException
def pytest_addoption(parser):
    """Register the command-line options consumed by the browser fixture."""
    option_specs = (
        ('--browser_name', 'chrome', "Choose browser: chrome or firefox"),
        ('--language', 'es', "Choose lang"),
    )
    for opt_name, default_value, help_text in option_specs:
        parser.addoption(opt_name, action='store', default=default_value,
                         help=help_text)
@pytest.fixture(scope="function")
def browser(request):
browser_name = request.config.getoption("browser_name")
user_language = request.config.getoption("language")
if browser_name == "chrome":
print("\nstart browser chrome for test...")
options = Options()
options.add_experimental_option('prefs', {'intl.accept_languages': user_language})
browser = webdriver.Chrome(options=options)
elif browser_name == "firefox":
print("\nstart browser firefox for test...")
fp = webdriver.FirefoxProfile()
fp.set_preference("intl.accept_languages", user_language)
browser = webdriver.Firefox(firefox_profile=fp)
else:
print("Browser {} still is not implemented".format(browser_name))
yield browser
print("\nquit browser...")
browser.quit()
|
[
"jkireenko137@gmail.com"
] |
jkireenko137@gmail.com
|
4b307fe96486875b343a028769a3cd2960ea4026
|
da459298c4bdbb745f4ed80ce1c9da15dd8fbb34
|
/demisto_sdk/commands/lint/resources/pylint_plugins/certified_partner_level_checker.py
|
981ddc9f3d5f604f670e1dfe61b03fc2be380a78
|
[
"MIT"
] |
permissive
|
demisto/demisto-sdk
|
af998a87523d03097f725ed8f31f6a44f4605ef2
|
3169757a2f98c8457e46572bf656ec6b69cc3a2e
|
refs/heads/master
| 2023-08-22T03:44:31.654275
| 2023-08-21T14:45:22
| 2023-08-21T14:45:22
| 219,291,269
| 63
| 75
|
MIT
| 2023-09-14T14:41:12
| 2019-11-03T11:36:13
|
Python
|
UTF-8
|
Python
| false
| false
| 8,826
|
py
|
# You can find documentation about adding new checker here:
# http://pylint.pycqa.org/en/latest/how_tos/custom_checkers.html#write-a-checker
"""
#### How to add a new check?
1. Chose the lowest support level that the checker should check.
2. Add a new checker in the `<support>_level_checker.py` file.
1. Add a new Error/ Warning message in the message list.
2. Add a new checker function which includes the actual logic of the check.
3. Add your checker function under the relevant visit/leave function so which will activate it on the node.
* For more explanation regarding pylint plugins and how to add a new checker:
http://pylint.pycqa.org/en/latest/how_tos/custom_checkers.html#write-a-checker
3. Add a new unit test for your checker inside the `test_pylint_plugin` folder under the relevant support level.
4. For error messages, add your new message number in the `test_build_xsoar_linter_py3_command` and
`test_build_xsoar_linter_py2_command` of the `command_builder_test.py` file.
5. Add the check to the `xsoar_linter_integration_test.py` test suit.
"""
import astroid
from pylint.checkers import BaseChecker
from pylint.interfaces import IAstroidChecker
# -------------------------------------------- Messages ------------------------------------------------
# Registry of lint messages: id -> (displayed message, symbolic name, description).
# E-prefixed ids are errors, W-prefixed ids are warnings.
cert_partner_msg = {
    "E9001": (
        "Sys.exit use is found, Please use return instead.",
        "sys-exit-exists",
        "Ensure to not use sys.exit in the code.",
    ),
    "W9005": (
        # NOTE(review): the message has a typo ("wasnt") and the description
        # below looks copy-pasted from a print checker; strings are kept
        # as-is because tooling/tests may match them exactly — fix upstream.
        "Main function wasnt found in the file, Please add main()",
        "main-func-doesnt-exist",
        "Please remove all prints from the code.",
    ),
    "W9008": (
        "Do not use demisto.results function. Please return CommandResults object instead.",
        "demisto-results-exists",
        "Do not use demisto.results function.",
    ),
    "W9009": (
        "Do not use return_outputs function. Please return CommandResults object instead.",
        "return-outputs-exists",
        "Do not use return_outputs function.",
    ),
    "W9016": (
        "Initialize of params was found outside of main function. Please use demisto.params() only inside main "
        "func",
        "init-params-outside-main",
        "Initialize of params was found outside of main function. Please initialize params only inside main func",
    ),
    "W9017": (
        "Initialize of args was found outside of main function. Please use demisto.args() only inside main func",
        "init-args-outside-main",
        "Initialize of args was found outside of main function. Please use demisto.args() only inside main func",
    ),
}
class CertifiedPartnerChecker(BaseChecker):
    """Pylint checker enforcing the certified-partner support-level rules."""

    __implements__ = IAstroidChecker
    name = "certified-partner-checker"
    priority = -1
    msgs = cert_partner_msg

    def __init__(self, linter=None):
        super().__init__(linter)
        # Names of every function defined in the checked module; consumed by
        # leave_module to verify a main() function exists.
        self.list_of_function_names = set()

    # ------------------------------------- visit functions -------------------------------------------------
    """
    `visit_<node_name>` is a function which will be activated while visiting the node_name in the ast of the
    python code.
    When adding a new check:
    1. Add a new checker function to the validations section.
    2. Add the function's activation under the relevant visit function.
    """

    def visit_call(self, node):
        # Run every call-level checker on each Call node.
        self._sys_exit_checker(node)
        self._return_outputs_checker(node)
        self._demisto_results_checker(node)
        self._init_params_checker(node)
        self._init_args_checker(node)

    def visit_functiondef(self, node):
        # Record every defined function name for the main() existence check.
        self.list_of_function_names.add(node.name)

    # ------------------------------------- leave functions -------------------------------------------------
    """
    `leave_<node_name>` is a function which will be activated while leaving the node_name in the ast of the
    python code.
    When adding a new check:
    1. Add a new checker function to the validations section.
    2. Add the function's activation under the relevant leave function.
    * leave_module will be activated at the end of the file.
    """

    def leave_module(self, node):
        self._main_function(node)

    # ---------------------------------------------------- Checkers ------------------------------------------------------
    """
    Checker functions are the functions that have the logic of our check and should be activated in one or more
    visit/leave functions.
    """

    # -------------------------------------------- Call Node ---------------------------------------------
    def _sys_exit_checker(self, node):
        """
        Args: node which is a Call Node.
        Check:
        - if sys.exit() statement exists in the current node.
        Adds the relevant error message using `add_message` function if one of the above exists.
        """
        # Broad except is deliberate: node.func may lack attrname/expr for
        # calls that are not attribute accesses (e.g. plain function calls).
        try:
            if (
                node.func.attrname == "exit"
                and node.func.expr.name == "sys"
                and node.args
                and node.args[0].value != 0
            ):
                self.add_message("sys-exit-exists", node=node)
        except Exception:
            pass

    def _return_outputs_checker(self, node):
        """
        Args: node which is a Call Node.
        Check:
        - if return_outputs() statement exists in the current node.
        Adds the relevant error message using `add_message` function if one of the above exists.
        """
        try:
            if node.func.name == "return_outputs":
                self.add_message("return-outputs-exists", node=node)
        except Exception:
            pass

    def _demisto_results_checker(self, node):
        """
        Args: node which is a Call Node.
        Check:
        - if demisto.results() statement exists in the current node.
        Adds the relevant error message using `add_message` function if one of the above exists.
        """
        try:
            if node.func.attrname == "results" and node.func.expr.name == "demisto":
                self.add_message("demisto-results-exists", node=node)
        except Exception:
            pass

    def _init_params_checker(self, node):
        """
        Args: node which is a Call Node.
        Check:
        - if demisto.params() statement exists and if its parent node is main().
        Adds the relevant error message using `add_message` function if one of the above exists.
        """
        try:
            if node.func.attrname == "params" and node.func.expr.name == "demisto":
                check_param = True
                parent = node.parent
                # check if main function is one of the parent nodes of the current node that contains demisto.params()
                while check_param and parent:
                    if (
                        isinstance(parent, astroid.FunctionDef)
                        and parent.name == "main"
                    ):
                        check_param = False
                    parent = parent.parent
                if check_param:
                    self.add_message("init-params-outside-main", node=node)
        except AttributeError:
            pass

    def _init_args_checker(self, node):
        """
        Args: node which is a Call Node.
        Check:
        - if demisto.args() statement exists and if its parent node is main().
        Adds the relevant error message using `add_message` function if one of the above exists.
        """
        try:
            if node.func.attrname == "args" and node.func.expr.name == "demisto":
                check_param = True
                parent = node.parent
                # check if main function is one of the parent nodes of the current node that contains demisto.params()
                while check_param and parent:
                    if (
                        isinstance(parent, astroid.FunctionDef)
                        and parent.name == "main"
                    ):
                        check_param = False
                    parent = parent.parent
                if check_param:
                    self.add_message("init-args-outside-main", node=node)
        except AttributeError:
            pass

    # -------------------------------------------- Module Node ---------------------------------------------
    def _main_function(self, node):
        """
        Args: node which is a Call Node.
        Check:
        - if main() function exists in the code.
        Adds the relevant error message using `add_message` function if one of the above exists.
        """
        if "main" not in self.list_of_function_names:
            self.add_message("main-func-doesnt-exist", node=node)
def register(linter):
    """Required pylint plugin entry point: attach the checker to the linter."""
    linter.register_checker(CertifiedPartnerChecker(linter))
|
[
"noreply@github.com"
] |
demisto.noreply@github.com
|
5d047cf6f9557497961864d7edee842ef2bdf485
|
135d94d4d5c9f9d7cf242f9be17d8cdef2ec5c02
|
/02Python基本语法/10模块/random模块.py
|
9f6d4bd0443d35facb4a54c85dcfa68f8ee9b984
|
[] |
no_license
|
CodingHaHa/PythonBase
|
ad0982f93cfd9ffa850424538af0072076bfc8b9
|
51da00cb81851b9301045796aa3e32eb52293443
|
refs/heads/master
| 2021-01-20T21:37:29.502478
| 2017-09-27T16:05:39
| 2017-09-27T16:05:39
| 101,774,891
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 565
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by fengL on 2017/9/4
# Demo of the functions in the `random` module. (Comments translated to English.)
import random
# 1: random float in [0.0, 1.0)
print(random.random())
# 2: random integer from a range
print(random.randint(1,8))  # random integer from 1 to 8, both ends included
# 3: random element from a sequence
print(random.choice("hello"))
print(random.choice([1,2,8,9,5,3]))
print(random.choices(["hello",15,"abc",1010]))  # note: choices() returns a list
print(random.sample(["hello",15,"abc",1010],2))  # pick the given number of distinct elements from the sequence
print(random.randrange(1,3))  # random integer in 1..2, right bound excluded
|
[
"645528331@qq.com"
] |
645528331@qq.com
|
cb8e0c1ccc59a707870cdfaf0dcc196eab7b9667
|
654f667027a96234c9243b588a9ceba22707f0ce
|
/image_gallery/urls.py
|
903aff6b64da102aca33317f476dd02f58824020
|
[] |
no_license
|
sabsekr/ImageGallery
|
1e9c3820a5913a3e58a45ecec3055136bf3df500
|
cfa2982531bc345b6a8f052035906839347df9dc
|
refs/heads/master
| 2020-12-02T06:06:43.752484
| 2016-09-11T19:31:36
| 2016-09-11T19:31:36
| 67,949,059
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from . import views
"""
ex: /imagegallery/5/
ex: /imagegallery/add/
"""
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<pk>[0-9]+)/$', views.ShowPhotoView.as_view(), name='showphoto'),
url(r'^add/$', views.upload_pic, name='upload_pic'),
]
|
[
"sabsekr@gmail.com"
] |
sabsekr@gmail.com
|
ef9831a4ec3f53c650cd71d2f9e61f0cadb9b5ee
|
1c5d7d4156bf22832dac3eb80016f9e92eccce02
|
/Bens_code/sprint_1_code.py
|
837e07fd37d549aa666f27f49bf9b6d926b96a06
|
[] |
no_license
|
kaissaradi/Checkers-Survival
|
dfafb7e6ba00060ff18d6b2492f0e429ed0f1bb1
|
31dbf00c98ca8c2aa5040abeb840092e8aef895d
|
refs/heads/master
| 2022-11-25T19:52:59.960851
| 2020-07-15T01:52:05
| 2020-07-15T01:52:05
| 279,734,570
| 0
| 0
| null | 2020-07-15T01:46:41
| 2020-07-15T01:46:40
| null |
UTF-8
|
Python
| false
| false
| 2,589
|
py
|
import random
print("test file")  # testing if file works
class item:
    """Base class for every in-game item.

    Holds a display name and a free-text description, both mutable
    through the accessor methods below.
    """

    def __init__(self, name, str_desc):
        """Create an item from its name and description string."""
        self._name = name
        self._str_desc = str_desc

    def get_name(self):
        """Return the item's name."""
        return self._name

    def set_name(self, name):
        """Replace the item's name."""
        self._name = name

    def description(self):
        """Return the item's description text."""
        return self._str_desc

    def set_description(self, desc):
        """Replace the item's description text."""
        self._str_desc = desc

    def __str__(self):
        return self._name + ": " + self._str_desc
class weapon(item):
    """Weapon class of the game: an item that deals randomized damage."""

    def __init__(self, name, str_desc, dmg_low, dmg_high):
        """Creates a weapon object.

        parameters: name, str_desc, dmg_low, dmg_high
        dmg_low/dmg_high bound the damage range (inclusive).
        """
        # CLEANUP: delegate to the base-class initializer instead of
        # duplicating the _name/_str_desc assignments.
        super().__init__(name, str_desc)
        self._dmg_low = dmg_low
        self._dmg_high = dmg_high

    def __str__(self):
        return self._name + ": " + self._str_desc + " low: " + str(self._dmg_low) + ", high:" + str(self._dmg_high)

    def rand_dmg(self):
        """Returns randomized dmg value between dmg_low and dmg_high (inclusive)."""
        return random.randrange(self._dmg_low, self._dmg_high + 1)

    def get_low(self):
        """Returns low range of damage."""
        return self._dmg_low

    def get_high(self):
        """Returns high range of damage."""
        return self._dmg_high
class consumable(item):
    """Consumable class in game: an item with a value and a limited use count."""

    def __init__(self, name, str_desc, value, use_count):
        """Creates a consumable object.

        parameters: name, str_desc, value, use_count
        """
        # CLEANUP: delegate to the base-class initializer instead of
        # duplicating the _name/_str_desc assignments.
        super().__init__(name, str_desc)
        self._value = value
        self._use_count = use_count

    def __str__(self):
        return self._name + ": " + self._str_desc + " value: " + str(self._value) + ", use count:" + str(self._use_count)

    def get_value(self):
        """get the value of the consumable."""
        return self._value

    def get_use_count(self):
        """get the remaining use count"""
        return self._use_count

    # EDIT: this method should probably pass in a unit argument to change it
    # def use_item(self, unit):
    def use_item(self):
        """uses the item, decrementing the use count"""
        self._use_count -= 1

    def usable(self):
        """checks if item is usable"""
        return self._use_count > 0
|
[
"benjamin.condrea@gmail.com"
] |
benjamin.condrea@gmail.com
|
645ffb55785736ab413eafd83055b6a5cf76c0ad
|
d177a5a49fcfb2fbaabb8254029f32708115e404
|
/scorch/WikiList.py
|
c78ad2fb958dba6e8df5d8fc885f52114bfeb66f
|
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
pmachapman/singrcnz
|
b985175de4322723c09898c0ef2b193845c8bf6b
|
bd099df66f4d40ecb1c201c969acba526bba8b53
|
refs/heads/master
| 2022-12-23T02:23:03.129270
| 2021-02-11T10:43:44
| 2021-02-11T10:43:44
| 194,562,171
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,132
|
py
|
#! /usr/local/bin/python
import sys, os
from glob import glob
import fnmatch
def link(song):
    """Return the hymnal.ws URL for *song*.

    Spaces become underscores and semicolons are percent-escaped to match
    the site's file naming.
    """
    escaped = song.replace(' ', '_').replace(';', '%3b')
    return 'http://hymnal.ws/public/Songs/%s.htm' % escaped
def listPsalms(dest, filename):
    # Write one wiki-list entry for the song file `filename` to `dest`.
    # Psalm entries are printed as "<number>: <title>"; anything else keeps
    # its plain name. (Python 2 print-statement syntax throughout.)
    print >>sys.stderr, 'Working on "%s"' % filename
    # Recover the song title from the file name.
    song = os.path.basename(filename).replace('.htm', '')
    song = song.replace('_', ' ')
    if song.lower().startswith('psalm'):
        # Strip leading zeros from the psalm number but keep any suffix
        # beyond the third digit (e.g. "023a" -> "23a").
        num = song[5:].split()[0]
        num = str(int(num[0:3])) + num[3:]
        name = ' '.join(song.split()[1:])
        print >>dest, '%s: %s' % (num, name),
    else:
        print >>dest, song,
    # Trailing commas above suppress the newline so the link lands on the
    # same output line.
    print >>dest, '- [%s view]' % link(song)
    print >>dest, ' * -'
    print >>dest
def main():
    # Collect file patterns from argv and, when piped, from stdin too.
    argv = sys.argv
    if not sys.stdin.isatty():
        argv += sys.stdin.read().split()
    if not argv[1:]:
        print 'Usage: WikiList.py <list-htm-files>'
        print 'Wildcards are allowed'
        sys.exit(1)
    dest = sys.stdout
    # Expand each pattern ourselves so Windows shells work too.
    for arg in argv[1:]:
        for filename in glob(arg):
            listPsalms(dest, filename)
    # NOTE(review): this closes sys.stdout itself; harmless at process exit
    # but would break any caller that prints afterwards.
    dest.close()

if __name__ == '__main__':
    main()
|
[
"berwynhoyt@users.noreply.github.com"
] |
berwynhoyt@users.noreply.github.com
|
8ca06a4fb7458b5fab1dd5ea0ab800643ed04f7d
|
3cda9193890d149833ae022539d91cdb6fab9cbf
|
/api/news/models.py
|
eb1146a15ef8540de24b3e4e6b3d36b5267d40ca
|
[] |
no_license
|
ITT13021/share-your-story-backend
|
1f49d79c4326c423801618ab095f05ca2de4cd28
|
e29f592eb7b363a17d2891e4d041aff484d5b4a6
|
refs/heads/master
| 2020-03-07T21:16:47.948076
| 2018-05-08T09:04:02
| 2018-05-08T09:04:02
| 127,722,235
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,211
|
py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from api.user.models import User
class AbstractAuditCreate(models.Model):
    """Abstract audit mixin recording who created a row and when.

    help_text values are user-facing runtime strings (Chinese) and are left
    untouched.
    """
    # Creator; a distinct reverse accessor per concrete model via %(class)s.
    create_user = models.ForeignKey(User, models.DO_NOTHING, related_name='%(app_label)s_%(class)s_create_user_set', blank=True, null=True, help_text=_(u'创建人'))
    # Creation timestamp, set automatically on insert.
    create_date = models.DateTimeField(auto_now_add=True, blank=True, null=True, help_text=_(u'创建时间'))

    class Meta:
        abstract = True
class News(AbstractAuditCreate):
    """A news/message item; inherits creator/created-at audit fields."""
    # NOTE(review): max_length=12 is unusually short for a title — confirm.
    title = models.CharField(blank=True, null=True, max_length=12, help_text=_(u'消息标题'))
    content = models.TextField(blank=True, null=True, help_text=_(u'消息内容'))

    class Meta:
        app_label = 'news'
        db_table = 'news'
class UserNews(models.Model):
    """Per-user delivery/read state for a News item."""
    # 0 = all users, 1 = selected users (labels are runtime strings).
    TYPE_CHOICES = ((0, "全体用户"), (1, "部分用户"))
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0 —
    # confirm before upgrading the framework.
    news = models.ForeignKey(News)
    user = models.ForeignKey(User, blank=True, null=True)
    type = models.SmallIntegerField(choices=TYPE_CHOICES, blank=True, null=True, help_text=_(u'消息范围'))
    # Whether the user has read this news item.
    read = models.BooleanField(default=False)

    class Meta:
        app_label = 'news'
        db_table = 'user_news'
|
[
"473457683@qq.com"
] |
473457683@qq.com
|
f141eec00b50fc9b1d9c129ec4d8dc3f8b6e3292
|
f56a22bd81264edf0b18ba9fa2d9aa504f7579cb
|
/main.py
|
7eaacf94041457e17b229528b285399e6511898b
|
[] |
no_license
|
omega-photonics/dragon-pyBOTDR
|
7dfa99441e0e9c44e9cd2e21e561207b4be9f67f
|
8996b474028a8404affc87f0e1ff6aa413100451
|
refs/heads/master
| 2016-09-06T11:35:46.932293
| 2012-08-07T20:28:20
| 2012-08-07T20:28:20
| 5,084,196
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 228
|
py
|
import sys
from PyQt4 import QtGui
from mainwindow import MainWindow
def main():
    """Create the Qt application, show the main window, and run the event loop."""
    app = QtGui.QApplication(sys.argv)
    wnd = MainWindow()
    wnd.show()
    # exec_() blocks until the window closes; its return code becomes the
    # process exit status.
    sys.exit(app.exec_())

if __name__ == "__main__":
    main()
|
[
"gleb.budylin@gmail.com"
] |
gleb.budylin@gmail.com
|
2241964cc9bed0e9b197a44c20608f0c0f5af726
|
d3fab385fadb66b9d86e7c7d7094319b85a225a0
|
/pyCalculator.py
|
281ad662d4ad314b12f1c35e61b80d9c021aa036
|
[] |
no_license
|
karenmcewen/pyCalculator
|
03b8a4d628220c94607b0444fa6a95853ffb5142
|
a9a5f763dec29fe35cdeea2afd7a00b62f838c51
|
refs/heads/master
| 2020-03-19T07:15:19.948674
| 2018-06-12T00:43:42
| 2018-06-12T00:43:42
| 136,099,301
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,373
|
py
|
# this is a simple python calculator that does not use NumPy
def menu():
    """Print the calculator's operation menu, followed by a blank line."""
    menu_lines = (
        'MENU',
        '1: Add +',
        '2: Subtract -',
        '3: Multiply x',
        '4: Division /',
        '5: Square x^2',
        '6: Exponent x^y',
        '7: Square Root x^0.5',
        'Press any other number to exit',
    )
    for line in menu_lines:
        print(line)
    print()
def addnums(x, y):
    """Return the sum of x and y."""
    total = x + y
    return total
def subtractnums(x, y):
    """Return x minus y."""
    difference = x - y
    return difference
def multiplynums(x, y):
    """Return the product of x and y."""
    product = x * y
    return product
def dividenums(x, y):
    """Return x / y, or an error string when y is zero."""
    if y == 0:
        return "Error: Cannot divide by zero"
    return x / y
def squarenums(x):
    """Return the square of x."""
    squared = x * x
    return squared
def exponentnums(x, y):
    """Return x raised to the power y."""
    power = x ** y
    return power
def squarerootnums(x):
    """Return the square root of x (computed as the 0.5 power)."""
    return pow(x, 0.5)
# main program starts here
choice = 0
# Loop until the user selects an exit option (choice is set to -1 below).
while choice >= 0:
    print('Welcome to my simple pyCalculator!')
    print()
    # catch ValueError if num1 or num2 is not a number
    while True:
        try:
            num1 = float(input('Please enter a number: '))
            num2 = float(input('Enter a second number: '))
            break
        except ValueError:
            print("Please enter numbers only. Try again.")
    menu()
    # catch ValueError if choice is not an integer
    while True:
        try:
            choice: int = int(input('What operation would you like to perform? '))
            break
        except ValueError:
            print('You must enter an integer. Please try again.')
            menu()
    # Dispatch to the chosen operation; any unlisted number exits the loop.
    if choice == 1:
        print(str(num1) + " + " + str(num2) + " = " + str(addnums(num1, num2)))
        print()
    elif choice == 2:
        print(str(num1) + " - " + str(num2) + " = " + str(subtractnums(num1, num2)))
        print()
    elif choice == 3:
        print(str(num1) + " * " + str(num2) + " = " + str(multiplynums(num1, num2)))
        print()
    elif choice == 4:
        print(str(num1) + " / " + str(num2) + " = " + str(dividenums(num1, num2)))
        print()
    elif choice == 5:
        print(str(num1) + " ^ 2 = " + str(squarenums(num1)))
        print()
    elif choice == 6:
        print(str(num1) + " ^ " + str(num2) + " = " + str(exponentnums(num1, num2)))
        print()
    elif choice == 7:
        print("The square root of " + str(num1) + " = " + str(squarerootnums(num1)))
        print()
    else:
        choice = -1
# exit program
print()
print('Thank you for using my pyCalculator!')
print()
|
[
"karenmcewen.engineer@gmail.com"
] |
karenmcewen.engineer@gmail.com
|
2678eae76cccda14c085c8bec4b17a3fd445b084
|
cb8c110b66eb53b1d9b8a057c3f5ea2875282e76
|
/greatkart/urls.py
|
3b48a686d0e9311868f9744896b8133338a02afb
|
[] |
no_license
|
sampanar/greatkart
|
02611ee4520db85ca403b03e7daec0bf5a5bae3f
|
e3826c9adfe8b2d4838e24f1bb4000cdd6816dd6
|
refs/heads/main
| 2023-06-17T23:16:25.873164
| 2021-07-16T12:28:47
| 2021-07-16T12:28:47
| 378,828,981
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,038
|
py
|
"""greatkart URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from . import views
from django.conf.urls.static import static
from django.conf import settings
# URL routing table: admin site, storefront home, store pages, and cart,
# plus media file serving for uploaded content (development use).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('',views.home,name="home"),
    path('store/',include('store.urls')),
    path('cart/',include('carts.urls')),
] + static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
|
[
"sampan.arora22@gmail.com"
] |
sampan.arora22@gmail.com
|
d2df054bc8bcfdd22a460700a2a251b842cf4b38
|
1997039813917d5723e0db44f0271e3c08f401a6
|
/Python/capitlize.py
|
7f1771c5fc6cfe8cbcca7226bbcf3f1f2056bc02
|
[] |
no_license
|
7Aishwarya/HakerRank-Solutions
|
6fe0f331d1c8e9a0b4c6da662658fc3c3ab83098
|
a33740d0fbdb5a3b2984e87ea904a5e359bb8fa9
|
refs/heads/master
| 2021-07-23T18:57:09.324414
| 2020-07-07T13:33:05
| 2020-07-07T13:33:05
| 194,402,004
| 8
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 752
|
py
|
'''You are asked to ensure that the first and last names of people begin with a capital letter in their passports.
For example, alison heck should be capitalised correctly as Alison Heck.
Given a full name, your task is to capitalize the name appropriately.'''
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the solve function below.
def solve(s):
    """Capitalize the first character and every character that follows a space.

    str.upper() is a no-op on non-alphabetic characters, so names such as
    "12abc" are left unchanged, matching the required behavior.

    Fix: the original indexed l[i+1] whenever l[i] was a space, which raised
    IndexError for strings ending in a space; checking the previous character
    instead is equivalent and safe at both ends.
    """
    chars = list(s)
    for i in range(len(chars)):
        # Capitalize a character that starts the string or follows a space.
        if i == 0 or chars[i - 1] == " ":
            chars[i] = chars[i].upper()
    return "".join(chars)
# HackerRank harness: read the name from stdin and write the capitalized
# result to the grader-supplied output file.
if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    s = input()
    result = solve(s)
    fptr.write(result + '\n')
    fptr.close()
|
[
"7aishwaryasharma@gmail.com"
] |
7aishwaryasharma@gmail.com
|
eea76d555f147fc3a42c95e7b875e4c2f188b39b
|
ea41c6aecd5613551e4d689441227927726c5956
|
/twitter_client/twitter_full.py
|
cfb2842668eaf6cb75f1ab7478899d9f2cd54877
|
[] |
no_license
|
filipcima/spja-course
|
e9efcaec5e419e89743e197b86e6d7a04172e494
|
da1297850eddd2ed3ab853d50794e61bb4cdbb69
|
refs/heads/master
| 2021-05-07T07:51:35.305801
| 2017-11-06T10:34:53
| 2017-11-06T10:34:53
| 109,270,716
| 0
| 0
| null | 2017-11-02T13:48:53
| 2017-11-02T13:48:53
| null |
UTF-8
|
Python
| false
| false
| 1,907
|
py
|
import json
import re
import urllib.request, urllib.error, urllib.parse
import tc_auth
# ANSI terminal escape codes used to colorize console output.
PINK = '\033[95m'
RESET = '\033[0m'  # restores the default terminal color
BLUE = '\033[94m'
GREEN = '\033[92m'
WARNING = '\033[93m'
RED = '\033[91m'
class Tweet(object):
    """A single tweet: author name, body text, and geo information."""

    def __init__(self, from_user, text, geo):
        self.from_user = from_user
        self.text = text
        self.geo = geo

    def __repr__(self):
        # Highlight every #hashtag in green before composing the output line.
        highlighted = self.text
        for tag in re.findall(r"\#\w+", self.text):
            highlighted = highlighted.replace(tag, GREEN + tag + RESET)
        body = "{0}, Text: {1}, Geo: {2}".format(self.from_user, highlighted, self.geo)
        return PINK + "From: " + RESET + body
class Twitter(object):
    """Minimal Twitter search client built on the v1.1 search endpoint."""

    base_search_url = 'https://api.twitter.com/1.1/search/tweets.json?q={}&count=5&tresult_type=popular'

    def __init__(self, *search):
        self.client = tc_auth.twitter_auth()  # OAuth-authenticated HTTP client
        self.search = search
        self.decoded_json = None

    def create_search_url(self):
        """Build the query URL: '#' becomes %23, terms are joined with %20."""
        encoded_terms = [term.replace('#', '%23') for term in self.search]
        return Twitter.base_search_url.format('%20'.join(encoded_terms))

    def download(self):
        """Perform the HTTP request and keep the raw response and payload."""
        url = self.create_search_url()
        self.response, self.data = self.client.request(url)

    def decode_json(self):
        """Decode the downloaded payload into a Python structure."""
        self.decoded_json = json.loads(self.data.decode('utf-8'))

    def get_tweets(self):
        """Fetch matching statuses and wrap each one in a Tweet object."""
        self.download()
        self.decode_json()
        return [
            Tweet(status['user']['name'], status['text'], status['geo'])
            for status in self.decoded_json['statuses']
        ]
# Demo: search for tweets tagged both #django and #python and print them.
t = Twitter('#django', '#python')
tweets = t.get_tweets()
for tweet in tweets:
    print(tweet)
|
[
"jan.gaura@gmail.com"
] |
jan.gaura@gmail.com
|
2f89a47fc5df0e949f3b465976d52f2158fbeed9
|
0c80a8a8cc8cb2b8ed552b1f2cde7bd8fe14c4f4
|
/SmallLedgerSite/migrations/0004_auto_20190810_1052.py
|
66313fccae993881b79a60f19137d8508a144c59
|
[] |
no_license
|
Tenchrio/smallledger
|
489498dce7f4b93f48790ae29453a8ad773cb044
|
15296a8f7351e1dc94ae04bf9c2ebb94e001d1fb
|
refs/heads/master
| 2022-05-16T03:04:57.871955
| 2019-09-10T19:12:25
| 2019-09-10T19:12:25
| 203,612,051
| 0
| 0
| null | 2022-04-22T22:12:28
| 2019-08-21T15:22:52
|
Python
|
UTF-8
|
Python
| false
| false
| 445
|
py
|
# Generated by Django 2.2.4 on 2019-08-10 10:52
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated migration: give Item.boughtDate a default of "now".

    dependencies = [
        ('SmallLedgerSite', '0003_auto_20190810_0326'),
    ]

    operations = [
        migrations.AlterField(
            model_name='item',
            name='boughtDate',
            field=models.DateField(default=django.utils.timezone.now),
        ),
    ]
|
[
"chrisvannooten@hotmail.com"
] |
chrisvannooten@hotmail.com
|
1e044eaac95ce45cb6c996e9cae893b570490893
|
428f788e24387b8d0c71a04d221bf40dc85840e1
|
/Methods/kernels.py
|
6c53720a27fb039fea83258eca442183bdd38727
|
[] |
no_license
|
wendixuan/Thesis_Project
|
08f382f5ba922b377148d50a60beaeb340dec1cd
|
efd00f349a2dff1909f17ee9455379bf6ebeb1d2
|
refs/heads/master
| 2020-06-30T19:04:19.500412
| 2016-09-05T20:07:16
| 2016-09-05T20:07:16
| 67,286,957
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 28,271
|
py
|
######################
#
#This code was modified basedon the code of 'Kernels for sequentially ordered data' written by F. J Kiraly and H. Oberhauser.
#
#############################################################
######################
#
#This code can be use to do SVC with bag of features kernel, sequential kernel and a simple version of s-sequential kernel
#
######################
#Addition:
#(1) small_rev():modified funtion of cumsum_rev(), which only consider the diagonal of matrix
#(2) SquizeKernel_D(): the funtion for computing s-sequential kernel values between path 1 and path 2.
#(3) BagSeqKernel(): the funtion for computing bag of feature kernel values between path 1 and path 2.
#(4) BagKernelizer(): a class for computing kernel matrix with the bag of feature
#(5) BagSVCpipeline():pipeline for Bag of feature kernel with SVC
#(6) BagKernelXY() is a function for computing the values of bag of features kernel for two sample pf time series in consistent or inconsistent lengths.
#Modifications are in:
#(1)SeqKernelXY() is modified to be able to work on time series in inconsistent lengths
# and be able to compute s-sequential kernel as well (when Dia=True).
#(2)SeqKernelizer() is modified to be able to compute sequential kernel (Dia=False ) and s-sequential kernel(Dia=True) and be able to work with time series in inconsistent lengths (V=True)
# Notice that when V=True, the dataset should include the vector of original length in the last column of the 2D array.
#####################
import numpy as np
from scipy.sparse.linalg import svds
# In[]
# sqdist
def sqdist(X, Y):
    """Matrix of squared Euclidean distances between the rows of X and Y.

    Returns an (M, N) array whose (i, j) entry is ||X[i] - Y[j]||^2,
    computed via the expansion |x|^2 + |y|^2 - 2<x, y>.
    """
    row_norms_x = (X * X).sum(-1)
    row_norms_y = (Y * Y).sum(-1)
    return row_norms_x[:, np.newaxis] + row_norms_y[np.newaxis, :] - 2 * np.inner(X, Y)
# In[]
# cumsum varia
def cumsum_rev_first(array):
    """Strictly-reversed cumulative sum along axis 0.

    out[i] = array[i+1] + ... + array[-1]; the last entry is zero.
    """
    result = np.zeros_like(array)
    reversed_tail = array[:0:-1]
    result[:-1] = np.cumsum(reversed_tail, 0)[::-1]
    return result
def cumsum_rev(array):
    """2-D strictly-reversed double cumulative sum.

    out[i, j] = sum of array[i', j'] over all i' > i and j' > j;
    the last row and last column of the result are zero.
    """
    result = np.zeros_like(array)
    flipped = array[:0:-1, :0:-1]
    result[:-1, :-1] = np.cumsum(np.cumsum(flipped, 0), 1)[::-1, ::-1]
    return result
#Modified funtion of cumsum_rev(), which only consider the diagonal of matrix
def small_rev(array):
    """Diagonal-only variant of cumsum_rev.

    The reversed sub-array keeps its off-diagonal entries and accumulates a
    cumulative sum only along its main diagonal; the last row and column of
    the result are zero.

    Fix: the original applied np.fill_diagonal to a *view* of the input,
    silently mutating the caller's array; we now operate on a copy.
    """
    out = np.zeros_like(array)
    flipped = array[:0:-1, :0:-1].copy()  # copy so the caller's array is untouched
    if flipped.size > 0:
        np.fill_diagonal(flipped, flipped.diagonal().cumsum())
    out[:-1, :-1] = flipped[::-1, ::-1]
    return out
def cumsum_mult(array, dims):
    """Apply np.cumsum successively along each axis listed in dims."""
    result = array
    for axis in dims:
        result = np.cumsum(result, axis=axis)
    return result
def roll_mult(array, shift, dims):
    """Apply np.roll with the given shift successively along each axis in dims."""
    rolled = array
    for axis in dims:
        rolled = np.roll(rolled, shift, axis=axis)
    return rolled
def makeinds(indlist):
    # Build an open-mesh index (np.ix_) from a list of per-axis index lists.
    return np.ix_(*indlist)
def cumsum_shift_mult(array, dims):
    """Cumulative sum along dims, shifted forward by one with zeros at index 0.

    Equivalent to an "exclusive" cumulative sum on each axis in dims: the
    summed array is rolled by one and the wrapped-around first slice of
    every summed axis is zeroed.
    """
    array = roll_mult(cumsum_mult(array, dims), 1, dims)
    index_ranges = [range(size) for size in array.shape]
    for axis in dims:
        selector = list(index_ranges)
        selector[axis] = [0]
        array[makeinds(selector)] = 0
    return array
def rankreduce(array, rankbound):
    """Rank-`rankbound` factor of `array` via sparse SVD.

    Returns U * diag(s): an (m, rankbound) factor matrix computed from the
    top `rankbound` singular triplets of the float32-cast input.
    """
    U, singular_values, _ = svds(array.astype('f'), k=rankbound)
    return np.dot(U, np.diag(singular_values))
def rankreduce_batch(arrays, rankbound):
    """Apply rankreduce to every slice arrays[i, :, :] of a 3-D stack."""
    reduced = np.zeros([arrays.shape[0], arrays.shape[1], rankbound])
    for index, sample in enumerate(arrays):
        reduced[index] = rankreduce(sample, rankbound)
    return reduced
# In[]
# x ((obs1,dim)) and y ((obs2,dim)) numpy arrays
#return ((obs1,obs2)) array with kernel as entries
# Primitive kernels on increment vectors; each maps (obs1, dim) x (obs2, dim)
# inputs to an (obs1, obs2) Gram matrix via np.inner / sqdist.
kPolynom = lambda x,y,scale,deg : (1+scale*np.inner(x,y))**deg  # polynomial kernel
kGauss = lambda x,y,scale: np.exp(-(scale**2)*sqdist(x,y)/2)  # Gaussian (RBF) kernel
kEuclid = lambda x,y,scale: scale*np.inner(x,y)  # scaled linear kernel
kLaplace = lambda x,y,scale: np.exp(-scale*np.sqrt(np.inner(x-y,x-y)))  # Laplace kernel (pointwise form)
kTanH = lambda x,y,off,scale: np.tanh(off+scale*np.inner(x,y))  # sigmoid/tanh kernel
# In[]
# FUNCTION mirror
# mirrors an upper triangular kernel matrix, helper for SqizeKernel
def mirror(K):
    """Mirror a triangular kernel matrix into a full symmetric matrix.

    Keeps the diagonal once: K - diag(K) + K^T.
    (Converted from an assigned lambda to a def, per PEP 8 E731.)
    """
    return K - np.diag(np.diag(K)) + np.transpose(K)
# FUNCTION SquizeKernel
# computes the sequential kernel from a sequential kernel matrix
#
# Inputs:
# K the kernel matrix of increments, i.e.,
# K[i,j] is the kernel between the i-th increment of path 1,
# and the j-th increment of path 2
# L an integer \geq 1, representing the level of truncation
# optional:
# theta a positive scaling factor for the levels, i-th level by theta^i
# normalize whether the output kernel matrix is normalized
# defaults: theta = 1.0, normalize = False
#
# Output:
# a real number, the sequential kernel between path 1 and path 2
#
def SqizeKernel(K, L, theta=1.0, normalize=False):
    """Sequential kernel of two paths from their increment Gram matrix K.

    Iterates the level-L truncated recursion R <- 1 + cumsum_rev(K * R);
    with normalize=True every level is averaged over the matrix size and
    damped by theta.
    """
    ones = np.ones(K.shape)
    R = np.ones(K.shape)
    if normalize:
        normfac = np.prod(K.shape)
        for _ in range(L - 1):
            R = (ones + theta * cumsum_rev(K * R) / normfac) / (1 + theta)
        return (1 + theta * np.sum(K * R) / normfac) / (1 + theta)
    for _ in range(L - 1):
        R = ones + cumsum_rev(K * R)
    # The outermost level is applied here since i1 >= 1 always holds.
    return 1 + np.sum(K * R)
# FUNCTION SquizeKernel_D
# computes the values of s-sequential kernel matrix
#
# Inputs:
# K the kernel matrix of increments, i.e.,
# K[i,j] is the kernel between the i-th increment of path 1,
# and the j-th increment of path 2
# L an integer \geq 1, representing the level of truncation
# optional:
# theta a positive scaling factor for the levels, i-th level by theta^i
# normalize whether the output kernel matrix is normalized
# defaults: theta = 1.0, normalize = False
#
# Output:
# a real number, the s-sequential kernel between path 1 and path 2
#
def SqizeKernel_D(K, L, theta=1.0, normalize=False):
    """s-sequential kernel of two paths from their increment Gram matrix K.

    Same recursion as SqizeKernel, but with the diagonal-only accumulation
    small_rev in place of the full double cumulative sum.
    """
    ones = np.ones(K.shape)
    R = np.ones(K.shape)
    if normalize:
        normfac = np.prod(K.shape)
        for _ in range(L - 1):
            R = (ones + theta * small_rev(K * R) / normfac) / (1 + theta)
        return (1 + theta * np.sum(K * R) / normfac) / (1 + theta)
    for _ in range(L - 1):
        R = ones + small_rev(K * R)
    # The outermost level is applied here since i1 >= 1 always holds.
    return 1 + np.sum(K * R)
# FUNCTION BagKernel
# computes the bag of features kernel from a sequential kernel matrix
#
# Inputs:
# K the kernel matrix of increments, i.e.,
# K[i,j] is the kernel between the i-th increment of path 1,
# and the j-th increment of path 2
#
# Output:
# a real number, the sequential kernel between path 1 and path 2
#
def BagSeqKernel(K):
    """Bag-of-features kernel: the mean over all pairwise increment kernels."""
    return K.sum() / K.size
# In[]
# FUNCTION SquizeKernelHO
# computes the higher order sequential kernel from a sequential kernel matrix
#
# Inputs:
# K the kernel matrix of increments, i.e.,
# K[i,j] is the kernel between the i-th increment of path 1,
# and the j-th increment of path 2
# L an integer \geq 1, representing the level of truncation
# D an integer \geq 1, representing the order of approximation
# optional:
# theta a positive scaling factor for the levels, i-th level by theta^i
# normalize whether the output kernel matrix is normalized
# defaults: theta = 1.0, normalize = False
#
# Output:
# a real number, the sequential kernel between path 1 and path 2
#
def SqizeKernelHO(K, L, D = 1, theta = 1.0, normalize = False):
    """Higher-order (order-D) approximation of the sequential kernel.

    A[l, d1, d2] accumulates level-l contributions with d1/d2 repeated
    increments on either path; the result sums the top level.

    NOTE(review): theta and normalize are accepted but never used in this
    function — confirm whether that is intentional.
    NOTE(review): the second inner update assigns A[l,:,d1] from A[l,0,d1];
    the asymmetry with the preceding d1 update looks unintended — verify
    against the reference derivation before relying on D > 1.
    """
    A = np.zeros(np.concatenate(([L,D,D],K.shape)))
    I = np.ones(K.shape)
    for l in range(1,L):
        # The order of repeated increments cannot exceed the current level.
        Dprime = min(D, l)
        A[l,0,0,:,:] = K*(I + cumsum_shift_mult(np.sum(A[l-1,:,:,:,:],(0,1)),(0,1) ) )
        for d1 in range(1,Dprime):
            A[l,d1,0,:,:] = A[l,d1,0,:,:] + (1/d1)*K*cumsum_shift_mult(np.sum(A[l-1,d1-1,:,:,:],0),(1))
            A[l,:,d1,:,:] = A[l,0,d1,:,:] + (1/d1)*K*cumsum_shift_mult(np.sum(A[l-1,:,d1-1,:,:],0),(0))
            for d2 in range(1,Dprime):
                A[l,d1,d2,:,:] = A[l,d1,d2,:,:] + (1/(d1*d2))*K*cumsum_shift_mult(np.sum(A[l-1,d1-1,d2-1,:,:],0),(0))
    return 1 + np.sum(A[L-1,:,:,:,:])
# In[]
import collections
# low-rank decomposition
# models matrix A = U x V.T
# U and V should be *arrays*, not *matrices*
LRdec = collections.namedtuple('LRdec', ['U','V'])
# FUNCTION GetLowRankMatrix
# produce the matrix from the LRdec object
#
# Inputs:
# K a LRdec type object
#
# Output:
# the matrix K.U x K.V.T modelled by the LRdec object
def GetLowRankMatrix(K):
    """Materialize the dense matrix K.U x K.V^T modelled by an LRdec pair."""
    factor_u, factor_v = K.U, K.V
    return np.inner(factor_u, factor_v)
# FUNCTION AddLowRank
# efficient computation of sum of low-rank representations
# using this and then GetLowRankMatrix is more efficient than an
# explicit computation if the rank of the final matrix is not full
#
# Inputs:
# K, R LRdec type objects to add
#
# Output:
# LRdec type object for sum of K and R
def AddLowRank(K, R):
    """Sum of two low-rank representations: concatenate the factor columns."""
    summed_u = np.concatenate((K.U, R.U), axis=1)
    summed_v = np.concatenate((K.V, R.V), axis=1)
    return LRdec(summed_u, summed_v)
def AddLowRankOne(U, P):
    """Concatenate two factor matrices column-wise."""
    return np.concatenate((U, P), axis=1)
def MultLowRank(K, theta):
    """Scale the matrix represented by an LRdec pair by theta.

    Fix: the original scaled *both* factors by theta, which multiplies the
    represented matrix U x V^T by theta**2 rather than theta. That is
    inconsistent with the dense code path in SqizeKernelLowRank, which
    applies theta exactly once per level; scaling a single factor matches it.
    """
    return LRdec(theta * K.U, K.V)
# FUNCTION HadamardLowRank
# efficient computation of Hadamard product of low-rank representations
# using this and then GetLowRankMatrix is more efficient than an
# explicit computation if the rank of the final matrix is not full
#
# Inputs:
# K, R LRdec type objects to multiply
#
# Output:
# LRdec type object for Hadamard product of K and R
def HadamardLowRank(K, R):
    """Hadamard (entrywise) product of two low-rank representations.

    The factor ranks multiply: every output column is the product of one
    column of K's factor with one column of R's factor.
    """
    rank_k = K.U.shape[1]
    rank_r = R.U.shape[1]
    product_u = np.tile(K.U, rank_r) * np.repeat(R.U, rank_k, 1)
    product_v = np.tile(K.V, rank_r) * np.repeat(R.V, rank_k, 1)
    return LRdec(product_u, product_v)
# multiplies U with every component (1st index) of P
#def HadamardLowRankBatch(U, P):
# rankU = U.shape[1]
# N = P.shape[0]
# rankP = P.shape[2]
# return (np.repeat(np.repeat(np.array(U,ndmin = 3), rankP, 2),N,0)*np.repeat(P,rankU,2))
# multiplies U and P component-wise (1st)
def HadamardLowRankBatch(U, P):
    """Component-wise (1st-axis) batched Hadamard product of factor stacks."""
    rank_u = U.shape[2]
    rank_p = P.shape[2]
    return np.tile(U, rank_p) * np.repeat(P, rank_u, 2)
# with Nystroem type subsampling
def HadamardLowRankSubS(U, P, rho):
    """Batched Hadamard product with Nystroem-style column subsampling.

    Keeps a random (sorted) subset of rho of the rank_u * rank_p product
    columns. Non-deterministic: draws from np.random.
    """
    rank_u = U.shape[2]
    rank_p = P.shape[2]
    kept = np.sort(np.random.permutation(range(rank_u * rank_p))[range(rho)])
    return (np.tile(U, rank_p) * np.repeat(P, rank_u, 2))[:, :, kept]
# FUNCTION cumsum_LowRank
# cumsum for LRdec type collections
# equivalent of cumsum_rev for LRdec type objects
#
# Inputs:
# K LRdec type object to cumsum
#
# Output:
# LRdec type object for cumsum_rev of K
def cumsum_LowRank(K):
    """cumsum_rev for LRdec pairs: reversed exclusive cumsum of each factor."""
    summed_u = cumsum_rev_first(K.U)
    summed_v = cumsum_rev_first(K.V)
    return LRdec(summed_u, summed_v)
# FUNCTION sum_LowRank
# sum for LRdec type collections
# equivalent of sum_rev for LRdec type objects
#
# Inputs:
# K LRdec type object to sum
#
# Output:
# LRdec type object for sum of K
def sum_LowRank(K):
    """Sum of all entries of the represented matrix U x V^T.

    Equals the inner product of the column totals of the two factors.
    """
    column_totals_u = sum(K.U)
    column_totals_v = sum(K.V)
    return np.inner(column_totals_u, column_totals_v)
# FUNCTION SquizeKernelLowRank
# computes the sequential kernel from a sequential kernel matrix
# faster by using a low-rank approximation
#
# Inputs:
# K LRdec type object, models low-rank factors
# of the increment kernel matrix K such that K = K.U x K.V.T
# where K[i,j] is the kernel between the i-th increment of path 1,
# and the j-th increment of path 2
# L an integer \geq 1, representing the level of truncation
# optional:
# theta a positive scaling factor for the levels, i-th level by theta^i
# normalize whether the output kernel matrix is normalized
# rankbound a hard threshold for the rank of the level matrices
# defaults: theta = 1.0, normalize = False, rankbound = infinity
#
# Output:
# a real number, the sequential kernel between path 1 and path 2
#
def SqizeKernelLowRank(K, L, theta = 1.0, normalize = False, rankbound = float("inf")):
    """Sequential kernel of two paths from a low-rank (LRdec) increment matrix.

    K is an LRdec pair with K.U x K.V^T the increment kernel matrix.

    NOTE(review): the normalize branch densifies K and falls back to the
    dense recursion, so the low-rank speed-up is lost there.
    NOTE(review): rankbound is accepted but never enforced (see the todo in
    the loop) — confirm before relying on it.
    """
    # L-1 runs through loop;
    # returns R_ij=(1+\sum_i2>i,j2>j A_i2,j2(1+\sum A_iLjL)...)
    if normalize:
        K = GetLowRankMatrix(K)
        normfac = np.prod(K.shape)
        I = np.ones(K.shape)
        R = np.ones(K.shape)
        for l in range(L-1):
            R = (I + theta*cumsum_rev(K*R)/normfac)/(1+theta)
        return (1 + theta*np.sum(K*R)/normfac)/(1+theta)
    else:
        # Low-rank recursion: R starts as the rank-1 all-ones representation.
        I = LRdec(np.ones([K.U.shape[0],1]),np.ones([K.V.shape[0],1]))
        R = I
        for l in range(L-1):
            # todo: execute only if rank is lower than rankbound
            # reduce to rank
            R = AddLowRank(I,MultLowRank(cumsum_LowRank(HadamardLowRank(K,R)),theta))
        # Outermost level applied once, since i1 >= 1 always holds.
        return 1 + theta*sum_LowRank(HadamardLowRank(K,R))
#outermost bracket: since i1>=1 and not i1>1 we do it outside of loop
# FUNCTION SquizeKernelLowRankFast
# computes the sequential kernel from a sequential kernel matrix
# faster by using a low-rank approximation
#
# Inputs:
# K Array of dimension 3, containing joint low-rank factors
# 1st index counts sequences
# 2nd index counts time
# 3rd index counts features
# so K[m,:,:] is the mth factor,
# and K[m,:,:] x K[m,:,:]^t is the kernel matrix of the mth factor
# L an integer \geq 1, representing the level of truncation
# optional:
# theta a positive scaling factor for the levels, i-th level by theta^i
# normalize whether the output kernel matrix is normalized
# rankbound a hard threshold for the rank of the level matrices
# defaults: theta = 1.0, normalize = False, rankbound = infinity
#
# Output:
# a matrix R such that R*R^t is the sequential kernel matrix
#
def SqizeKernelLowRankFast(K, L, theta = 1.0, normalize = False, rankbound = float("inf")):
    """Batched low-rank sequential-kernel factorization.

    K is a 3-D stack of joint low-rank factors — (sequence, time, feature) —
    so K[m] x K[m]^T is the increment kernel matrix of sequence m. Returns a
    factor matrix R such that R x R^T is the sequential kernel Gram matrix.
    A finite rankbound caps the growing factor rank by random (Nystroem-style)
    column subsampling, which makes the result non-deterministic.

    NOTE(review): the normalize branch divides by Ksize = number of
    sequences, not by the per-matrix entry count used in SqizeKernel —
    confirm the intended normalization.
    """
    if normalize:
        Ksize = K.shape[0]
        B = np.ones([Ksize,1,1])
        R = np.ones([Ksize,1])
        for l in range(L):
            # One level: Hadamard with K, shifted cumsum over time, damping.
            P = np.sqrt(theta)*HadamardLowRankBatch(K,B)/Ksize
            B = cumsum_shift_mult(P,[1])
            if rankbound < B.shape[2]:
                permut = np.sort(np.random.permutation(range(B.shape[2]))[range(rankbound)])
                B = B[:,:,permut]
            R = np.concatenate((R,np.sum(B,axis = 1)), axis=1)/(np.sqrt(1+theta))
        return R
    else:
        Ksize = K.shape[0]
        B = np.ones([Ksize,1,1])
        R = np.ones([Ksize,1])
        for l in range(L):
            # todo: execute only if rank is lower than rankbound
            # reduce to rank
            P = np.sqrt(theta)*HadamardLowRankBatch(K,B)
            B = cumsum_shift_mult(P,[1])
            if rankbound < B.shape[2]:
                permut = np.sort(np.random.permutation(range(B.shape[2]))[range(rankbound)])
                B = B[:,:,permut]
            R = np.concatenate((R,np.sum(B,axis = 1)), axis=1)
        return R
# In[]
# FUNCTION SeqKernel
# computes the sequential kernel matrix for a dataset of time series
def SeqKernel(X, kernelfun, L=2, D=1, theta=1.0, normalize=False, lowrank=False, rankbound=float("inf")):
    """Sequential-kernel Gram matrix for a dataset of time series.

    X: array of sequences (first axis indexes samples); kernelfun maps two
    (dim, time) paths (transposed to (time, dim)) to an increment Gram
    matrix. L is the truncation level, D the approximation order, theta the
    level scaling. Only the lower triangle is computed, then mirrored.

    Fix: the inner loops referenced an undefined name `Y` (a copy/paste from
    SeqKernelXY); the Gram matrix here is of X with itself, so X is used.
    """
    N = np.shape(X)[0]
    KSeq = np.zeros((N, N))
    if not lowrank:
        if D == 1:
            for row1ind in range(N):
                for row2ind in range(row1ind + 1):
                    K = kernelfun(X[row1ind].T, X[row2ind].T)
                    KSeq[row1ind, row2ind] = SqizeKernel(K, L, theta, normalize)
        else:
            for row1ind in range(N):
                for row2ind in range(row1ind + 1):
                    KSeq[row1ind, row2ind] = SqizeKernelHO(kernelfun(X[row1ind].T, X[row2ind].T), L, D, theta, normalize)
    else:
        # Joint low-rank path: R with R R^T approximating the kernel matrix.
        # NOTE(review): rankbound is accepted but not forwarded here,
        # matching the original behavior — confirm intended.
        R = SqizeKernelLowRankFast(X.transpose([0, 2, 1]), L, theta, normalize)
        KSeq = np.inner(R, R)
    return mirror(KSeq)
def BagKernel(X, kernelfun, xint=[]):
    """Bag-of-features kernel Gram matrix for a dataset of time series.

    xint[i] is the valid (unpadded) increment count of series i; each
    increment Gram matrix is cropped to the valid region before averaging.
    Only the lower triangle is computed, then mirrored.

    Fixes: the loop referenced the undefined name `Y` (should be X), and the
    column crop index used xint[row1ind] although the columns of K belong to
    series row2ind (matching the indexing convention of BagKernelXY).
    """
    # xint is never mutated, so the shared default list is harmless here.
    N = np.shape(X)[0]
    KSeq = np.zeros((N, N))
    for row1ind in range(N):
        for row2ind in range(row1ind + 1):
            K = kernelfun(X[row1ind].T, X[row2ind].T)
            KSeq[row1ind, row2ind] = BagSeqKernel(K[:xint[row1ind], :xint[row2ind]])
    return mirror(KSeq)
# FUNCTION SeqKernel
# computes sequential cross-kernel matrices
def SeqKernelXY(X,Y,kernelfun,L=2,D=1,theta=1.0,normalize = False,lowrank = False,rankbound = float("inf"),xint=[],yint=[],Dia=False):
    """Sequential cross-kernel matrix between datasets X (rows) and Y (columns).

    xint / yint hold per-series valid increment counts used to crop each
    increment Gram matrix (supports variable-length series).

    NOTE(review): with Dia=True only row2ind <= row1ind is filled and the
    result is not mirrored — confirm this mode is only used with X == Y.
    NOTE(review): the Dia branch calls SqizeKernel, not SqizeKernel_D,
    although Dia is documented as selecting the s-sequential variant.
    NOTE(review): the D > 1 branch ignores the xint/yint cropping.
    """
    N = np.shape(X)[0]
    M = np.shape(Y)[0]
    KSeq = np.zeros((N,M))
    if not(lowrank):
        if D == 1:
            if Dia:
                for row1ind in range(N):
                    for row2ind in range(row1ind+1):
                        K=kernelfun(X[row1ind].T,Y[row2ind].T)
                        KSeq[row1ind,row2ind] = SqizeKernel(K[:xint[row1ind],:yint[row2ind]],L,theta,normalize)
            else:
                for row1ind in range(N):
                    for row2ind in range(M):
                        K=kernelfun(X[row1ind].T,Y[row2ind].T)
                        KSeq[row1ind,row2ind] = SqizeKernel(K[:xint[row1ind],:yint[row2ind]],L,theta,normalize)
        else:
            for row1ind in range(N):
                for row2ind in range(M):
                    KSeq[row1ind,row2ind] = SqizeKernelHO(kernelfun(X[row1ind].T,Y[row2ind].T),L,D,theta,normalize)
    else:
        # Joint low-rank factorization path (ignores xint/yint and Dia).
        KSeq = np.inner(SqizeKernelLowRankFast(X.transpose([0,2,1]), L, theta, normalize, rankbound),SqizeKernelLowRankFast(Y.transpose([0,2,1]), L, theta, normalize, rankbound))
    return KSeq
# FUNCTION BagKernelXY
# computes sequential cross-kernel matrices
def BagKernelXY(X, Y, kernelfun, xint, yint):
    """Bag-of-features cross-kernel matrix between datasets X and Y.

    xint / yint give each series' valid increment count, used to crop the
    increment Gram matrix before averaging.
    """
    n_x = np.shape(X)[0]
    n_y = np.shape(Y)[0]
    KSeq = np.zeros((n_x, n_y))
    for i in range(n_x):
        for j in range(n_y):
            gram = kernelfun(X[i].T, Y[j].T)
            KSeq[i, j] = BagSeqKernel(gram[:xint[i], :yint[j]])
    return KSeq
# FUNCTION SeqKernelXY_D
# computes sequential cross-kernel matrices
# In[]
# FUNCTION DataTabulator(X)
def DataTabulator(X):
    """Flatten every sample of X: (N, d1, ..., dk) -> (N, d1*...*dk)."""
    shape = np.shape(X)
    flat_width = np.prod(shape[1:])
    return np.reshape(X, (shape[0], flat_width))
# In[]
# FUNCTION TimeSeriesReshaper
# makes a 3D time series array out of a 2D data array
def TimeSeriesReshaper(Xflat, numfeatures, subsample = 1, differences = True):
    """Reshape a flat 2-D data array into a 3-D (samples, features, time) array.

    Each row of Xflat is interpreted as numfeatures channels; the time axis
    is subsampled to every `subsample`-th point and, when differences=True,
    replaced by its first differences along time.

    Fix: the time-axis length used true division (`/`), which yields a float
    under Python 3 and breaks np.reshape; floor division is used instead.
    """
    flatXshape = np.shape(Xflat)
    Xshape = (flatXshape[0], numfeatures, flatXshape[1] // numfeatures)
    X = np.reshape(Xflat, Xshape)[:, :, ::subsample]
    if differences:
        return np.diff(X)
    else:
        return X
# In[3]
# CLASS SeqKernelizer
# pipelines pre-processing of a time series datset with support vector classifier
#
# parameters:
# Level, theta: parameters in of the sequentialization
# Level = cut-off degree
# theta = scaling factor
# kernel, scale, deg: parameter for the primary kernel
# kernel = name of the kernel used: linear, Gauss, Laplace, poly, GA(one type of Gaussian kernel)
# scale = scaling constant, multiplicative to scalar product
# deg = degree, for polynomial kernel
# subsample, numfeatures, differences:
# pre-processing parameters for time series.
# numfeatures = number of features per time point, for internal reshaping
# subsample = time series is subsampled to every subsample-th time point
# differences = whether first differences are taken or not
# lowrank = whether low-rank approximations are used or not
# New addition:
## V: True=time series in inconsistent length
# Dia: True=a simple version of s-sequential kernel False=Sequential kernel
#
from sklearn.base import BaseEstimator, TransformerMixin
class SeqKernelizer(BaseEstimator, TransformerMixin):
    """Transformer mapping raw time-series rows to a sequential-kernel Gram matrix.

    fit() stores the reshaped training series (and their effective lengths);
    transform(Y) returns the cross-kernel matrix between Y and the stored
    training data, for use with svm.SVC(kernel='precomputed').
    """

    def __init__(self, Level = 2, Degree = 1, theta = 1, kernel = 'linear',
                 scale = 1, deg = 2, X = np.zeros((1,2)), V=False,
                 numfeatures = 2, subsample = 100, differences = True, Dia = False,
                 normalize = False, lowrank = False, rankbound = float("inf")):
        # Sequentialization parameters: truncation level, order, level scaling.
        self.Level = Level
        self.Degree = Degree
        self.theta = theta
        # Pre-processing: subsampling step, channels per time point, differencing.
        self.subsample = subsample
        # Primary kernel choice and its parameters.
        self.kernel = kernel
        self.scale = scale
        self.deg = deg
        self.numfeatures = numfeatures
        self.differences = differences
        self.normalize = normalize
        self.lowrank = lowrank
        self.rankbound = rankbound
        self.X = X  # replaced by (reshaped series, lengths) in fit()
        # V=True: each row carries its true series length in the last column.
        self.V = V
        # Dia=True: use the (simplified) s-sequential kernel variant.
        self.Dia = Dia

    def fit(self, X, y=None):
        """Store the training series and their subsampled lengths."""
        if self.V:
            # Last column holds the original length; strip it and convert
            # the length to subsampled units.
            t = TimeSeriesReshaper(X[:,:-1], self.numfeatures, self.subsample, self.differences)
            v = (X[:,-1] - X[:,-1] % self.subsample) / self.subsample
            self.X = (t, v)
        else:
            t = TimeSeriesReshaper(X, self.numfeatures, self.subsample, self.differences)
            # NOTE(review): t is already subsampled by TimeSeriesReshaper;
            # dividing t.shape[2] by subsample again looks like a double
            # subsampling of the length — confirm.
            n = int(t.shape[2] / self.subsample)
            v = n * np.ones(t.shape[0])
            self.X = (t, v)
        return self

    def transform(self, Y):
        """Return the cross Gram matrix between Y and the fitted training data."""
        if self.V:
            y = TimeSeriesReshaper(Y[:,:-1], self.numfeatures, self.subsample, self.differences)
            yt = (Y[:,-1] - Y[:,-1] % self.subsample) / self.subsample
            Y = (y, yt)
        else:
            y = TimeSeriesReshaper(Y, self.numfeatures, self.subsample, self.differences)
            n = int(y.shape[2] / self.subsample)
            yt = n * np.ones(y.shape[0])
            Y = (y, yt)
        # Primary (increment-level) kernels, selected by name below.
        kPolynom = lambda x,y,scale,deg : (1+scale*np.inner(x,y))**deg
        kGauss = lambda x,y,scale: np.exp(-(scale**2)*sqdist(x,y)/2)
        kGA = lambda x,y,scale: np.exp(-(sqdist(x,y)/(2*(scale**2))+np.log(2-np.exp(-sqdist(x,y)/(2*(scale**2))))))
        kEuclid = lambda x,y,scale: scale*np.inner(x,y)
        kLaplace = lambda x,y,scale: np.exp(-scale*np.sqrt(np.inner(x-y,x-y)))
        def kernselect(kername):
            # Dispatch table: kernel name -> two-argument kernel closure.
            switcher = {
                'linear': lambda x,y: kEuclid(x,y,self.scale),
                'Gauss': lambda x,y: kGauss(x,y,self.scale),
                'GA': lambda x,y: kGA(x,y,self.scale),
                'Laplace': lambda x,y: kLaplace(x,y,self.scale),
                'poly': lambda x,y: kPolynom(x,y,self.scale,self.deg),
            }
            return switcher.get(kername, "nothing")
        # Lengths are passed as crop bounds (length - 1 increments per series).
        KSeq = SeqKernelXY(Y[0],self.X[0],kernselect(self.kernel),self.Level,self.Degree,self.theta,self.normalize,self.lowrank,self.rankbound,Y[1]-1,self.X[1]-1,self.Dia)
        return KSeq
# In[]
# CLASS TimeSeriesPreprocesser
# for pre-processing of time series type features
#
# parameters:
# numfeatures = number of features per time point, for internal reshaping
# subsample = time series is subsampled to every subsample-th time point
# differences = whether first differences are taken or not
#
class TimeSeriesPreprocesser(BaseEstimator, TransformerMixin):
    """Stateless transformer: reshape/subsample/difference series, then flatten.

    numfeatures: channels per time point; subsample: keep every k-th point;
    scale: multiplicative factor applied to the reshaped data;
    differences: take first differences along the time axis.
    """

    def __init__(self, numfeatures = 2, subsample = 100, scale = 1, differences = True):
        self.subsample = subsample
        self.numfeatures = numfeatures
        self.scale = scale
        self.differences = differences

    def fit(self, X, y=None):
        # Nothing to learn; present for scikit-learn API compatibility.
        return self

    def transform(self, Y):
        reshaped = self.scale * TimeSeriesReshaper(Y, self.numfeatures, self.subsample, self.differences)
        return DataTabulator(reshaped)
#Class Bagkernelizer
# parameters:
# kernel = name of the kernel used: linear, Gauss, Laplace, poly,GA(one type of Gaussian kernel)
# scale = scaling constant, multiplicative to scalar product
# deg = degree, for polynomial kernel
# numfeatures = number of features per time point, for internal reshaping
# subsample = time series is subsampled to every subsample-th time point
# differences = whether first differences are taken or not
# V: True=time series in inconsistent length
class BagKernelizer(BaseEstimator, TransformerMixin):
    """Transformer mapping raw time-series rows to a bag-of-features Gram matrix.

    fit() stores the reshaped training series and their effective lengths;
    transform(Y) returns the cross-kernel matrix between Y and the stored
    training data, for use with svm.SVC(kernel='precomputed').
    """

    def __init__(self,kernel = 'linear',
                 scale = 1, deg = 2, X=0,V=False,
                 numfeatures = 2, subsample = 100, differences = True
                 ):
        # Pre-processing: subsampling step, channels per time point, differencing.
        self.subsample = subsample
        # Primary kernel choice and its parameters.
        self.kernel = kernel
        self.scale = scale
        self.deg = deg
        self.numfeatures = numfeatures
        self.differences = differences
        self.X = X  # replaced by (reshaped series, lengths) in fit()
        # V=True: each row carries its true series length in the last column.
        self.V = V

    def fit(self, X, y=None):
        """Store the training series and their subsampled lengths."""
        if self.V:
            # Last column holds the original length; strip it and convert
            # the length to subsampled units.
            t = TimeSeriesReshaper(X[:,:-1], self.numfeatures, self.subsample, self.differences)
            v = (X[:,-1] - X[:,-1] % self.subsample) / self.subsample
            self.X = (t, v)
        else:
            t = TimeSeriesReshaper(X, self.numfeatures, self.subsample, self.differences)
            # NOTE(review): t is already subsampled by TimeSeriesReshaper;
            # dividing by subsample again looks like double subsampling — confirm.
            n = int(t.shape[2] / self.subsample)
            v = n * np.ones(t.shape[0])
            self.X = (t, v)
        return self

    def transform(self, Y):
        """Return the bag-of-features cross Gram matrix against the fit data."""
        if self.V:
            y = TimeSeriesReshaper(Y[:,:-1], self.numfeatures, self.subsample, self.differences)
            yt = (Y[:,-1] - Y[:,-1] % self.subsample) / self.subsample
            Y = (y, yt)
        else:
            y = TimeSeriesReshaper(Y, self.numfeatures, self.subsample, self.differences)
            n = int(y.shape[2] / self.subsample)
            yt = n * np.ones(y.shape[0])
            Y = (y, yt)
        # Primary (increment-level) kernels, selected by name below.
        kPolynom = lambda x,y,scale,deg : (1+scale*np.inner(x,y))**deg
        kGauss = lambda x,y,scale: np.exp(-(scale**2)*sqdist(x,y)/2)
        kGA = lambda x,y,scale: np.exp(-(sqdist(x,y)/(2*(scale**2))+np.log(2-np.exp(-sqdist(x,y)/(2*(scale**2))))))
        kEuclid = lambda x,y,scale: scale*np.inner(x,y)
        kLaplace = lambda x,y,scale: np.exp(-scale*np.sqrt(np.inner(x-y,x-y)))
        def kernselect(kername):
            # Dispatch table: kernel name -> two-argument kernel closure.
            switcher = {
                'linear': lambda x,y: kEuclid(x,y,self.scale),
                'Gauss': lambda x,y: kGauss(x,y,self.scale),
                'GA': lambda x,y: kGA(x,y,self.scale),
                'Laplace': lambda x,y: kLaplace(x,y,self.scale),
                'poly': lambda x,y: kPolynom(x,y,self.scale,self.deg),
            }
            return switcher.get(kername, "nothing")
        # Lengths are passed as crop bounds (length - 1 increments per series).
        KSeq = BagKernelXY(Y[0],self.X[0],kernselect(self.kernel),Y[1]-1,self.X[1]-1)
        return KSeq
# In[]
from sklearn import svm
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
# In[]
# Pipeline: sequential kernel + SVC on the precomputed Gram matrix.
SeqSVCpipeline = Pipeline([
    ('SeqKernelizer', SeqKernelizer()),
    ('svc', svm.SVC(kernel = 'precomputed'))
])

# Pipeline: bag-of-features kernel + SVC on the precomputed Gram matrix.
BagSVCpipeline = Pipeline([
    ('BagKernelizer', BagKernelizer()),
    ('svc', svm.SVC(kernel = 'precomputed'))
])

# Baseline: plain pre-processing + SVC, no sequential kernel.
TimeSVCpipeline = Pipeline([
    ('TimeSeriesPP', TimeSeriesPreprocesser()),
    ('svc', svm.SVC())
])

# Baseline: pre-processing + feature standardization + SVC, no sequential kernel.
TimeStdSVCpipeline = Pipeline([
    ('TimeSeriesPP', TimeSeriesPreprocesser()),
    ('standardize', StandardScaler()),
    ('svc', svm.SVC())
])
|
[
"noreply@github.com"
] |
wendixuan.noreply@github.com
|
e8ce66cb451166e57756c3b243911ae9d5f057c7
|
3948f21c00c4eb31d1a31f702f2ab44b4b5d53b5
|
/fabfile/aws_ssh.py
|
d8e5edc409f3a53433592c8ae5263215adb68e63
|
[] |
no_license
|
cwurld/Django-Fabric-AWS
|
f85b24fffd8e3b7e85e26a99f36080aa9bf547a3
|
621851ccc3d48569f97005ab7050989f5c77aed7
|
refs/heads/master
| 2021-01-20T23:45:12.363771
| 2014-09-03T23:16:00
| 2014-09-03T23:16:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 371
|
py
|
#!/usr/bin/env python
"""Open an interactive SSH session to the first configured EC2 instance."""
import subprocess
from project_conf import fabconf

if not fabconf['EC2_INSTANCES']:
    # BUG FIX: `print "..."` is Python-2-only syntax; print() with a
    # single argument behaves identically on both Python 2 and 3.
    print("Error: you need to add the instance domain name to project_conf.py")
else:
    cmd = 'ssh -i ~/.ssh/%s %s@%s' % (
        fabconf['EC2_KEY_NAME'], fabconf['SERVER_USERNAME'],
        fabconf['EC2_INSTANCES'][0])
    print(cmd)
    # shell=True is deliberate: the shell must expand `~` in the key path.
    subprocess.call(cmd, shell=True)
|
[
"cwurld@yahoo.com"
] |
cwurld@yahoo.com
|
4a932772a4f8979c35efa0e079839b3304fb886b
|
7fd955ef5e793bde3d16a73b16df1d8a9063175a
|
/Function_201704013_박현주/function.py
|
32686c10c7031c8120ced64245fd39422918367a
|
[] |
no_license
|
hjpark1397/Python_practice
|
7decb1a31c6cda03deca52c2ca3ad8bb4e3e5c3f
|
5edce56eb9d50bb62668b7ebc24c8ba9e31e6bc6
|
refs/heads/master
| 2020-05-04T20:08:59.556420
| 2019-12-24T04:46:52
| 2019-12-24T04:46:52
| 179,424,081
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 94
|
py
|
def f(x):
    """Return the value of the linear function 2*x + 7."""
    return x * 2 + 7
def g(x):
    """Return x squared."""
    return x * x
# Evaluate the sum f(x) + g(x) + f(g(x)) + g(f(x)) at x = 2.
x = 2
terms = (f(x), g(x), f(g(x)), g(f(x)))
print(sum(terms))
|
[
"hjpark1397@naver.com"
] |
hjpark1397@naver.com
|
d4a2c585f89207cf0f85b769520fcfeeed9e915f
|
283bbebacdbc028d05949d6b65cb69046f59061a
|
/Project_3-Basic Algorithms/problem_1.py
|
6a5534b8bcb587185988666e42fbe0f7ceaf8684
|
[] |
no_license
|
mohitgureja/DSA_Nanodegree-Python
|
f324e02fe9f7a4711383066d38223720ea5b7a23
|
4267183df499e05efd3d528114d0c2c42aec1029
|
refs/heads/master
| 2021-05-23T09:34:25.233681
| 2020-07-25T11:13:23
| 2020-07-25T11:13:23
| 253,223,318
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,059
|
py
|
def sqrt(number):
    """Return the floor of the square root of a non-negative integer.

    Iterative binary search over [0, number]: O(log n) time, O(1)
    space. Replaces the original recursive version that tracked its
    answer in module-level globals (not re-entrant) and recursed
    without bound for negative input.

    :param number: non-negative integer
    :return: the largest integer r such that r * r <= number
    :raises ValueError: if number is negative
    """
    if number < 0:
        raise ValueError("sqrt() requires a non-negative number")
    if number < 2:
        # 0 and 1 are their own floored square roots.
        return number
    low, high = 0, number
    answer = 0
    while low <= high:
        mid = (low + high) // 2
        square = mid * mid
        if square == number:
            # Exact perfect square.
            return mid
        if square < number:
            # mid is a valid floor candidate; search for a larger one.
            answer = mid
            low = mid + 1
        else:
            high = mid - 1
    return answer
# Test cases: (input, expected floored square root).
expected_results = [
    (9, 3),             # perfect square of 3
    (0, 0),             # prints 0
    (16, 4),            # perfect square of 4
    (1, 1),             # square of 1 is 1
    (676, 26),          # perfect square of 26
    (700, 26),          # floored square root of 700 is also 26
    (10000000008, 100000),  # floored square root is 100000
]
for value, expected in expected_results:
    print("Pass" if sqrt(value) == expected else "Fail")
|
[
"gurejamohit.32@gmail.com"
] |
gurejamohit.32@gmail.com
|
deb5f2b72979cfd127edf7c11a14da041eea92b6
|
11e383e9dd72548a9333f2270a2f707e2e221e64
|
/famOasis_Web/dh.py
|
c144cd719d50f0f22b675b21a308073cf538446c
|
[] |
no_license
|
ajaykumar1018/famOasis
|
6edba8c5c7d720e79973960d38855f4288ae78ee
|
1f6dfb60d13d2239f5be4bcdceba1608a9606196
|
refs/heads/master
| 2022-10-14T10:19:10.221257
| 2020-06-13T08:02:03
| 2020-06-13T08:02:03
| 255,266,160
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 693
|
py
|
import pickle
from Depression_detection_tweets import TweetClassifier
from tes import answers,session
from textblob import TextBlob

# Load the two pre-trained classifiers (tf-idf and bag-of-words).
# NOTE(review): pickle.load can execute arbitrary code while
# unpickling — these files must always be trusted local artifacts.
# BUG FIX: context managers guarantee the handles are closed even when
# unpickling raises (the original left both files open on error).
with open('my_classifier.pickle', 'rb') as f:
    sc_tf_idf = pickle.load(f)
with open('my_classifier2.pickle', 'rb') as ff:
    sc_bow = pickle.load(ff)

# All stored questionnaire answers, fetched eagerly.
an = answers.query.all()
# text1 = answers.query.fiter_by(username="cv").order_by(id.desc()).first()
# print(text1)
#blob = TextBlob(text1)
# for sentence in blob.sentences:
# pol = sentence.sentiment.polarity
# bo = sc_bow.classify(sentence)
# st = sc_tf_idf.classify(sentence)
# print(pol,",",bo,",",st)
# if pol <0.5 or bo == True or st == True:
# print('Depressed')
|
[
"42745121+ajaykumar1018@users.noreply.github.com"
] |
42745121+ajaykumar1018@users.noreply.github.com
|
787ee0eb14a92213c08a371b3ade200df4900c40
|
e4e4512859cc7b84236ec8ae9d90df614f6087b2
|
/anagrams.py
|
e49126cda7a8a0f9c78300872fe01a0bef76b2c3
|
[] |
no_license
|
csepdo/codecool
|
5bceaeb9dccbc7dbfbd547a0546308aa057c7f2a
|
a7219f73f4104763e54f4b438e8a9d47a2a26fbe
|
refs/heads/master
| 2020-03-27T15:28:57.304461
| 2018-09-07T21:37:14
| 2018-09-07T21:37:14
| 146,720,997
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 298
|
py
|
import sys
from itertools import groupby

# Path to the word list is the first CLI argument.
# BUG FIX: renamed the variable `file` — it shadowed the Python 2
# builtin and a common identifier, inviting confusion.
path = sys.argv[1]
with open(path, 'r') as f:
    anagrams = f.read().splitlines()

# Two words are anagrams iff their sorted character lists are equal;
# sort by that key so groupby collects each anagram family together.
anagram_groups = [list(group) for _, group in groupby(sorted(anagrams, key=sorted), sorted)]
print('\n'.join('{}. {}'.format(*k) for k in enumerate(anagram_groups)))
|
[
"arodzeta@gmail.com"
] |
arodzeta@gmail.com
|
4423dbbd57cd1e8af4efb574b2b4bd76ccb3b0bd
|
097bbe7b57927d90d47345e2f8f2d4b259975c98
|
/tests/genmake.py
|
3e63e59dce1f38902da937055be3df075a2366fe
|
[
"BSD-2-Clause"
] |
permissive
|
sudokuhk/libfresample
|
e4fda01d4f92851f523a4987bd4d4ea8c9a891b8
|
884c39a71a3af579c40eb2798854b82bf54a4b37
|
refs/heads/master
| 2021-01-24T04:13:09.611311
| 2012-10-17T16:28:08
| 2012-10-17T16:36:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,990
|
py
|
#!/usr/bin/env python
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
class Makefile(object):
    """Accumulates make rules in memory and writes them out on save().

    Rule text is buffered in a StringIO; the aggregate `all` and
    `.PHONY` dependency lists are emitted at save() time.
    """

    def __init__(self):
        self._fp = StringIO()
        self._all = set()      # targets that `all` should build
        self._targets = set()  # targets already emitted (dedup)
        self._phony = set()    # targets for the .PHONY declaration

    def add_default(self, x):
        """Mark target *x* as a dependency of the default `all` target."""
        self._all.add(x)

    def _write_dep(self, target, deps):
        """Emit a `target: dep dep ...` line into the buffer."""
        fp = self._fp
        fp.write(target + ':')
        for dep in deps:
            fp.write(' ' + dep)
        fp.write('\n')

    def build(self, target, deps, *cmds):
        """Emit a rule with shell commands; duplicate targets are ignored."""
        if target in self._targets:
            return
        self._targets.add(target)
        self._write_dep(target, deps)
        for cmd in cmds:
            self._fp.write('\t' + cmd + '\n')

    def write(self, *line):
        """Append raw lines verbatim to the buffer."""
        for line in line:
            self._fp.write(line + '\n')

    def save(self):
        """Write the accumulated rules to ./Makefile.

        A bare `all:` line is written first so `all` is the default
        goal; the real `all`/`.PHONY` dependency lists are appended to
        the buffer here (make merges prerequisite lists of repeated
        targets).
        """
        self._write_dep('all', sorted(self._all))
        self._write_dep('.PHONY', sorted(self._phony))
        # BUG FIX: the original leaked the file handle; a context
        # manager closes (and flushes) it reliably.
        with open('Makefile', 'w') as f:
            f.write('all:\n')
            f.write(self._fp.getvalue())

    def phony(self, target, deps):
        """Declare *target* phony and emit its dependency line."""
        self._phony.add(target)
        self._write_dep(target, deps)
# Module-level Makefile under construction; helpers below add targets.
make = Makefile()
# Regenerate the Makefile itself when this generator script changes.
make.build('Makefile', ['genmake.py'], 'python genmake.py')
# Common variables: the fresample binary under test and the sox tool.
make.write(
    'FR := ../build/product/fresample',
    'SOX := sox')
def test_sweep(depth, nchan, rate1, rate2):
    """Register sweep-signal generation, resampling at each quality
    level, and spectrogram targets for one depth/channels/rate combo."""
    name = 'sweep_r%ds%dn%d' % (rate1 // 1000, depth, nchan)
    half_rate = rate1 // 2
    if nchan == 1:
        synth = 'synth 8 sine 0+%d' % half_rate
    else:
        synth = 'synth 8 sine 0+%d sine %d+0' % (half_rate, half_rate)
    make.build(
        name + '.wav', ['Makefile'],
        '$(SOX) -b %d -r %d -n $@ %s vol 0.999' % (depth, rate1, synth))
    sweeps = []
    for q in range(11):
        variant = '%s_r%dq%02d' % (name, rate2 // 1000, q)
        make.build(
            variant + '.wav', [name + '.wav', '$(FR)', 'Makefile'],
            '$(FR) $(FRFLAGS) -q %d -r %d $< $@' % (q, rate2))
        make.build(
            variant + '.png', [variant + '.wav', 'Makefile'],
            'sox $< -n spectrogram -w kaiser -o $@')
        sweeps.append(variant + '.png')
        make.phony('sweep-q%d' % q, [variant + '.png'])
    make.phony('sweep-mono' if nchan == 1 else 'sweep-stereo', sweeps)
# Sweep targets: mono and stereo, for the common sample-rate pairs.
test_sweep(16, 1, 96000, 44100)
test_sweep(16, 1, 96000, 48000)
test_sweep(16, 1, 48000, 44100)
test_sweep(16, 2, 96000, 44100)
test_sweep(16, 2, 96000, 48000)
test_sweep(16, 2, 48000, 44100)
# Umbrella target that builds every sweep spectrogram.
make.phony('sweep', ['sweep-mono', 'sweep-stereo'])
def test_correct(depth, nchan, rate1, rate2):
    """Register targets that verify the CPU-specialised resampler output
    byte-matches the generic (`--cpu-features none`) implementation for
    the fixed-point quality levels q0-q5."""
    name = 'correct_r%ds%dn%d' % (rate1 // 1000, depth, nchan)
    inpath = name + '.wav'
    # Input fixture: 16 seconds of white noise (one synth per channel).
    cmd = '$(SOX) -b %d -r %d -n $@ synth 16 whitenoise' % (depth, rate1)
    if nchan == 2:
        cmd += ' whitenoise'
    make.build(inpath, ['Makefile'], cmd)
    outputs = []
    for q in range(6): # q6 and higher is floating point
        outpath = '%s_r%dq%02d' % (name, rate2 // 1000, q)
        # Resample once with features disabled, once with all enabled.
        out1 = outpath + '_1.wav'
        out2 = outpath + '_2.wav'
        make.build(
            out1, [inpath, '$(FR)', 'Makefile'],
            '$(FR) $(FRFLAGS) --cpu-features none -q %d -r %d $< $@' %
            (q, rate2))
        make.build(
            out2, [inpath, '$(FR)', 'Makefile'],
            '$(FR) $(FRFLAGS) --cpu-features all -q %d -r %d $< $@' %
            (q, rate2))
        outputs.append((out1, out2))
    # Aggregate target: exercise --test-bufsize at every quality, then
    # byte-compare each none/all output pair before declaring success.
    make.build(
        name, [x for y in outputs for x in y],
        *(['$(FR) --test-bufsize --cpu-features %s -q %d -r %d %s /dev/null'
           % (f, q, rate2, inpath)
           for q in range(11) for f in ['none', 'all']] +
          ['cmp %s %s' % x for x in outputs] +
          ['@echo === OUTPUT MATCHES ===']))
    # Channel-specific alias (test-mono / test-stereo), hooked into `test`.
    name2 = 'test-' + { 1: 'mono', 2: 'stereo' }.get(nchan, 'n%d' % nchan)
    make.phony(name2, [name])
    make.phony('test', [name2])
    make.add_default('test')
# Correctness targets for mono and stereo 48k -> 44.1k conversion.
test_correct(16, 1, 48000, 44100)
test_correct(16, 2, 48000, 44100)
# Housekeeping target, then write the finished Makefile to disk.
make.write(
    'clean:',
    '\trm -f *.wav *.png')
make.save()
|
[
"depp@zdome.net"
] |
depp@zdome.net
|
7125798784ef01fa24733603fcc0ad6664d8c0ec
|
9cc7254a81b606e139db8ca39ea3f67039e7ceda
|
/src/application/commands/populate_db.py
|
f4e712b8ac50742a000d615068ec7bb19b97e93a
|
[] |
no_license
|
zxy-zxy/quiz_bot
|
58523e006a5c4ef88d2def67683efdad9d161d09
|
66e8b2de69b7a45de7f8de08d62b7f897a803605
|
refs/heads/master
| 2023-04-12T18:19:05.123427
| 2019-06-15T19:27:41
| 2019-06-15T19:27:41
| 190,882,831
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,709
|
py
|
import os
import sys
import itertools
import logging
from redis import exceptions as redis_exceptions
from application.models import QuizQuestion
from application.parser import QuizQuestionsFileParser
logger = logging.getLogger(__name__)
def run_command(quiz_questions_directory, default_encoding, files_limit=None):
    """
    Populate redis database with quiz questions from provided files.
    """
    logger.debug(
        'Attempt to read files from directory {}.'.format(quiz_questions_directory)
    )
    try:
        files_list = [
            os.path.join(quiz_questions_directory, name)
            for name in os.listdir(quiz_questions_directory)
        ]
    except FileNotFoundError as err:
        logger.error(
            'An error has occurred during reading exploring directory.'
            'Directory: {}, error: {}'.format(quiz_questions_directory, str(err))
        )
        sys.exit(1)
    logger.debug('DB population started.')
    logger.debug(files_list)
    populate_db_from_files(files_list, default_encoding, files_limit)
def populate_db_from_files(quiz_questions_filepaths, default_encoding, files_limit):
    """Save questions from the given files into the database.

    :param quiz_questions_filepaths: list of filepaths to question files
    :param default_encoding: encoding the files are read with
    :param files_limit: optional cap on how many files are processed
    """
    question_batches = parse_quiz_questions_files(
        quiz_questions_filepaths, default_encoding
    )
    for batch in itertools.islice(question_batches, files_limit):
        try:
            QuizQuestion.bulk_save_to_db(batch)
        except redis_exceptions.RedisError as err:
            logger.error(str(err))
def parse_quiz_questions_files(quiz_questions_filepaths, encoding):
    """
    yields list of QuizQuestion objects.
    """
    for path in quiz_questions_filepaths:
        try:
            yield parse_quiz_question_file(path, encoding)
        except (IOError, FileNotFoundError) as err:
            # Skip unreadable files but keep processing the rest.
            logger.error(
                'An error has occurred during parsing file.'
                'File: {}, error: {}'.format(path, str(err))
            )
def parse_quiz_question_file(quiz_question_filepath, encoding):
    """
    :param quiz_question_filepath: filepath to concrete file with questions
    :param encoding: default encoding of concrete file
    :return: list of QuizQuestion objects.
    """
    with open(quiz_question_filepath, 'r', encoding=encoding) as f:
        quiz_question_file_parser = QuizQuestionsFileParser(f)
        # Idiom fix: list(generator) instead of the redundant
        # `[question for question in generator]` comprehension. The
        # generator must be fully consumed while the file is still open.
        return list(convert_question_dict_to_object(quiz_question_file_parser))
def convert_question_dict_to_object(quiz_question_file_parser):
    """
    Iterate over content of the file,
    convert dict into QuizQuestion object, yield it.
    """
    for raw_question in quiz_question_file_parser:
        try:
            quiz_question = QuizQuestion(**raw_question)
            logger.debug(
                'Question {} from file {} converted into '
                'model object successfully.'.format(
                    quiz_question, quiz_question_file_parser.open_file.name
                )
            )
            yield quiz_question
        except ValueError as err:
            logger.error(
                'An error {} has occurred during parsing file: {}'.format(
                    str(err), quiz_question_file_parser.open_file.name
                )
            )
|
[
"sinitsinvanya@gmail.com"
] |
sinitsinvanya@gmail.com
|
d4847cc2ffa88846557e44295022e54e8e899956
|
164457b943d0b426e9a5e2eb57779e4e37f2d1bb
|
/the_tale/game/heroes/storage.py
|
34abfcf90eaa1db6bd9823c436cf520f42c1ceed
|
[
"BSD-2-Clause-Views"
] |
permissive
|
lshestov/the-tale
|
64334fd99a442ad736d9e8a38e8f0fb52d0ebab6
|
6229edfec6420307975269be9926c68ecdefb930
|
refs/heads/master
| 2021-01-18T08:38:44.147294
| 2015-10-27T18:43:10
| 2015-10-27T18:43:10
| 50,228,827
| 0
| 0
| null | 2016-01-23T07:38:54
| 2016-01-23T07:38:54
| null |
UTF-8
|
Python
| false
| false
| 1,984
|
py
|
# coding: utf-8
from utg import words as utg_words
from utg import relations as utg_relations
from the_tale.game.map.places import storage as places_storage
class PositionDescriptionsStorage(object):
    """Caches human-readable descriptions of hero positions.

    Descriptions are memoised per place id (or road-endpoint pair);
    all caches are dropped whenever the global places storage reports
    a new version.
    """

    def __init__(self):
        self.clear()
    def clear(self):
        # NOTE(review): stores `._version` here while sync() compares
        # against `.version` — presumably the same value exposed via a
        # property; confirm in the places storage implementation.
        self._actual_places_version = places_storage.places_storage._version
        self._position_in_place_cache = {}
        self._position_near_place_cache = {}
        self._position_on_road_cache = {}
    def sync(self):
        # Invalidate all caches if places data changed since last use.
        if places_storage.places_storage.version != self._actual_places_version:
            self.clear()
    def text_in_place(self, place_id):
        # Description for a hero located inside a place: the place name.
        self.sync()
        if place_id not in self._position_in_place_cache:
            self._position_in_place_cache[place_id] = places_storage.places_storage[place_id].name
        return self._position_in_place_cache[place_id]
    def text_near_place(self, place_id):
        # Description for a hero near a place (genitive case of its name).
        self.sync()
        if place_id not in self._position_near_place_cache:
            self._position_near_place_cache[place_id] = u'окрестности %s' % places_storage.places_storage[place_id].utg_name.form(utg_words.Properties(utg_relations.CASE.GENITIVE))
        return self._position_near_place_cache[place_id]
    def text_on_road(self, place_from_id, place_to_id):
        # Description for a hero travelling a road, keyed by the ordered
        # (from, to) pair so each direction is cached separately.
        self.sync()
        key = (place_from_id, place_to_id)
        if key not in self._position_on_road_cache:
            self._position_on_road_cache[key] = u'дорога из %s в %s' % (places_storage.places_storage[place_from_id].utg_name.form(utg_words.Properties(utg_relations.CASE.GENITIVE)),
                                                                        places_storage.places_storage[place_to_id].utg_name.form(utg_words.Properties(utg_relations.CASE.ACCUSATIVE)))
        return self._position_on_road_cache[key]
    def text_in_wild_lands(self):
        # Fixed description used outside any place or road.
        return u'дикие земли'
# Module-level singleton used by the rest of the heroes package.
position_descriptions = PositionDescriptionsStorage()
|
[
"a.eletsky@gmail.com"
] |
a.eletsky@gmail.com
|
17f97e77fbada90c673d10c240538123e9aa8a1c
|
ca2675e967d488e58386714c582c3d937e4cc2c7
|
/simsms.py
|
62a9fcba6139e7fc28af4e68e5291d8e52f042c2
|
[] |
no_license
|
renatick321/bot
|
b500e0159ae7952dce439a0d8d3d132748bbc5a0
|
21d302132f95282e2f138df9d8e0b8afed99b77c
|
refs/heads/master
| 2022-12-05T13:18:02.771551
| 2020-08-03T10:31:02
| 2020-08-03T10:31:02
| 284,669,851
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,944
|
py
|
import requests
import random
from time import sleep
# NOTE(review): hard-coded API credential committed to source — move to
# an environment variable or config file and rotate this key.
APIKEY = 'JFOlfs3vSFy7YcCFzBVVKoKI7kQxJ4'
SERVICE = 'qw'
OPERATOR = None
# Status codes sent to the SMS API (see the status notes below):
BAD = 8   # cancel the activation
GOOD = 6  # confirm the SMS code and finish the activation
BALANCE_URL = f"https://smshub.org/stubs/handler_api.php?api_key={APIKEY}&action=getBalance"
NUMBER_URL = f"https://smshub.org/stubs/handler_api.php?api_key={APIKEY}&action=getNumber&service={SERVICE}"
# Human-readable service name -> smshub service code.
smshub = {
    "Telegram": "tg",
    "Вконтакте": "vk",
    "Whatsapp": "wa",
    "Avito": "av",
    "Qiwi": "qw",
    "Пятерочка": "bd",
    "McDonalds": "ry",
    "PayPal": "ts",
    "Burger King": "ip",
    "Яндекс": "ya",
    "BlaBlaCar": "ua",
    "Instagram": "ig",
    "Google": "go",
    "Steam": "mt",
}
# Country display name -> numeric country code used by the API.
COUNTRY = {
    "Россия": 0,
    "Украина": 1,
    "Казахстан": 2
}
# Immediately after receiving a number the following actions are available:
#   8 - cancel the activation
#   1 - report that the SMS has been sent (optional)
# For an activation with status 1:
#   8 - cancel the activation
# ==========================================================
# Immediately after receiving the code:
#   3 - request one more SMS
#   6 - confirm the SMS code and finish the activation
# For an activation with status 3:
#   6 - confirm the SMS code and finish the activation
def info():
    """Print a cheat sheet (in Russian) of the activation status codes."""
    l = [
        ' Сразу после получения номера доступны следующие действия:',
        ' 8 - Отменить активацию',
        ' 1 - Сообщить, что SMS отправлена (необязательно)',
        ' Для активации со статусом 1:',
        ' 8 - Отменить активацию',
        '==========================================================',
        ' Сразу после получения кода:',
        ' 3 - Запросить еще одну смс',
        ' 6 - Подтвердить SMS-код и завершить активацию',
        ' Для активации со статусом 3:',
        ' 6 - Подтвердить SMS-код и завершить активацию'
    ]
    print("\n".join(l))
def price_boost(price):
    """Apply the resale markup to a raw provider price.

    Small prices are bumped to fixed tiers (3 / 7 / 8); anything at or
    above 7 is multiplied by 1.7. The result is truncated (not rounded)
    to two decimal places and returned as a float.
    """
    value = float(price)
    if value < 1.5:
        value = 3
    elif value <= 5:
        value = 7
    elif value < 7:
        value = 8
    else:
        value *= 1.7
    # Truncate to two decimal places.
    return float(int(value * 100)) / 100
def get_price(service, country, apikey=APIKEY):
    """Return a display price string for `service` in `country`.

    Queries the simsms price endpoint, applies the local markup
    (price_boost) and appends the currency sign; returns the
    'not available' message when the response lacks a price entry.
    """
    r = requests.get(f"http://simsms.org/priemnik.php?metod=get_service_price&country={country}&service={service}&apikey={apikey}")
    d = r.json()
    key = list(dict(d).keys())[0]
    try:
        a = d[key][service]
        price = str(price_boost(str(list(a.keys())[0]))) + ' ₽'
    except (KeyError, IndexError, TypeError, AttributeError):
        # BUG FIX: was a bare `except:` that swallowed every exception
        # (including KeyboardInterrupt); catch only the lookup/shape
        # errors a malformed API response can produce.
        price = 'Нет в наличии'
    return price
class Number:
    """One phone number rented from the simsms API for an activation."""

    def __init__(self, service, country, apikey=APIKEY):
        """Rent a number; on any API failure self.number is set to ''.

        NOTE(review): the request URL interpolates the human-readable
        `country` name while self.country stores the numeric COUNTRY
        code — confirm which form this endpoint actually expects.
        """
        self.country = COUNTRY[country]
        self.service = service
        self.apikey = apikey
        try:
            url = f"http://simsms.org/priemnik.php?metod=get_number&country={country}&service={service}&apikey={apikey}"
            r = requests.get(url).json()
            self.id, self.number = r["id"], r["number"]
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Keep the best-effort
            # behaviour (empty number signals failure) but only for
            # ordinary exceptions.
            self.number = ""

    def __str__(self):
        return str(self.number)

    def get_sms(self):
        """Poll once per second (up to 240s) for the incoming SMS text.

        Returns the SMS body, or the cancellation message on timeout.
        """
        for i in range(240):
            sleep(1)
            r = requests.get(f"http://simsms.org/priemnik.php?metod=get_sms&country={self.country}&service={self.service}&id={self.id}&apikey={self.apikey}").json()
            print(r)  # debug trace of each poll response
            if r["sms"]:
                return r["sms"]
        return "Аренда номера была отменена"

    def edit_status(self, status):
        """Set the activation status code for this rental; return raw reply."""
        r = requests.get(f"http://simsms.org/stubs/handler_api.php?api_key={self.apikey}&action=setStatus&status={status}&id={self.id}")
        return r.text

    def get_balance(self):
        """Return the account balance as reported by the API (raw text)."""
        r = requests.get(f"http://simsms.org/priemnik.php?metod=get_balance&service=opt4&apikey={self.apikey}")
        return r.text
|
[
"afarut5@ya.ru"
] |
afarut5@ya.ru
|
6aa4e0fe0055f74b1565433911d407bbeb89fe42
|
f02b21d5072cb66af643a7070cf0df4401229d6e
|
/leetcode/depth_first_search/737-are_sentences_similar_2.py
|
dd14083e5fc358f23aafd1704a573f4e43389df1
|
[] |
no_license
|
dbconfession78/interview_prep
|
af75699f191d47be1239d7f842456c68c92b95db
|
7f9572fc6e72bcd3ef1a22b08db099e1d21a1943
|
refs/heads/master
| 2018-10-09T22:03:55.283172
| 2018-06-23T01:18:00
| 2018-06-23T01:18:00
| 110,733,251
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,438
|
py
|
class Solution:
    """Sentence similarity with transitive word pairs (LeetCode 737)."""

    def areSentencesSimilarTwo(self, words1, words2, pairs):
        """
        :type words1: List[str]
        :type words2: List[str]
        :type pairs: List[List[str]]
        :rtype: bool

        Two sentences are similar iff they have equal length and each
        positional word pair is connected in the similarity graph
        (similarity is reflexive, symmetric and transitive).

        BUG FIX: the original stored only one neighbour per word
        (each pair overwrote earlier ones), never returned a boolean,
        and its search referenced an undefined name; all corrected.
        """
        if len(words1) != len(words2):
            return False
        # Adjacency sets: a word may be similar to many words.
        graph = {}
        for a, b in pairs:
            graph.setdefault(a, set()).add(b)
            graph.setdefault(b, set()).add(a)
        return all(
            self.check_path(graph, w1, w2)
            for w1, w2 in zip(words1, words2)
        )

    def check_path(self, graph, w1, w2):
        """Return True iff w1 equals w2 or they are connected in graph.

        Iterative DFS with a visited set, so cycles cannot loop forever.
        """
        if w1 == w2:
            return True  # every word is similar to itself
        seen = {w1}
        stack = [w1]
        while stack:
            word = stack.pop()
            for neighbour in graph.get(word, ()):
                if neighbour == w2:
                    return True
                if neighbour not in seen:
                    seen.add(neighbour)
                    stack.append(neighbour)
        return False
def main():
    """Exercise the solution with the example from the problem statement."""
    words1 = ["great", "acting", "skills"]
    words2 = ["fine", "drama", "talent"]
    pairs = [["great", "good"], ["fine", "good"], ["drama", "acting"], ["skills", "talent"]]
    print(Solution().areSentencesSimilarTwo(words1, words2, pairs))
if __name__ == '__main__':
    main()
# Instructions
"""
Given two sentences words1, words2 (each represented as an array of strings), and a list of similar word pairs pairs, determine if two sentences are similar.
For example, words1 = ["great", "acting", "skills"] and words2 = ["fine", "drama", "talent"] are similar, if the similar word pairs are pairs = [["great", "good"], ["fine", "good"], ["acting","drama"], ["skills","talent"]].
Note that the similarity relation is transitive. For example, if "great" and "good" are similar, and "fine" and "good" are similar, then "great" and "fine" are similar.
Similarity is also symmetric. For example, "great" and "fine" being similar is the same as "fine" and "great" being similar.
Also, a word is always similar with itself. For example, the sentences words1 = ["great"], words2 = ["great"], pairs = [] are similar, even though there are no specified similar word pairs.
Finally, sentences can only be similar if they have the same number of words. So a sentence like words1 = ["great"] can never be similar to words2 = ["doubleplus","good"].
Note:
The length of words1 and words2 will not exceed 1000.
The length of pairs will not exceed 2000.
The length of each pairs[i] will be 2.
The length of each words[i] and pairs[i][j] will be in the range [1, 20].
"""
|
[
"Hyrenkosa1"
] |
Hyrenkosa1
|
6ac74eecc49751a4e763521bcc7080c337b70b26
|
638e55b080837f1ee7615a67cbd878062d469121
|
/NesneTabanli.py
|
1e3ad58d1b28457dd703d28164a2e12770eaebe2
|
[] |
no_license
|
burakkuru5534/pythonprojects
|
40e3a21c4795712a20f6c398567f44aa7f9286cd
|
42eaf79b04a267a31c00272734b43f4e2c069747
|
refs/heads/master
| 2020-03-29T23:09:56.270840
| 2018-09-28T13:46:48
| 2018-09-28T13:46:48
| 150,459,388
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 263
|
py
|
# Defining a class and creating an instance (object) of it.
class Sinifim():
    # Class attribute and methods.
    sinif_degiskeni="değişken"
    # NOTE(review): missing the `self` parameter — calling this through
    # an instance (Nesnem.sinif_fonk()) would raise TypeError; it is
    # never called in this file.
    def sinif_fonk():
        pass
Nesnem = Sinifim()
print(Nesnem.sinif_degiskeni)
|
[
"noreply@github.com"
] |
burakkuru5534.noreply@github.com
|
ac61e4c05327592079b4fceaf56f8993dfe156fc
|
0b22832a6f1babb063ea38774a4437702d83bf81
|
/pygame_geometry/materialline.py
|
5a731246f4d2885a4d79669a82a6289a73e7240e
|
[
"MIT"
] |
permissive
|
MarcPartensky/Pygame-Geometry
|
c3d7c68bcec19092aecd3faf89b838b36f192a49
|
61abbbeac0fd351253e06b19736d9939fd5b316e
|
refs/heads/master
| 2022-12-08T15:23:24.467015
| 2020-09-06T00:29:56
| 2020-09-06T00:29:56
| 293,174,314
| 7
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 550
|
py
|
from .abstract import Line
# NOTE(review): `Material` is never imported in this module (only Line
# comes from .abstract), so evaluating this class raises NameError —
# confirm where Material should be imported from.
class MaterialLine(Line,Material):
    """Base class of the ground class."""
    def __init__(self,point,angle,mass,**kwargs):
        """Create a material line."""
        super().__init__(point,angle,**kwargs)
        # Mass of the line; presumably consumed by physics code
        # elsewhere in the package — confirm.
        self.mass=mass
class Ground(MaterialLine):
    """Subclass of the material line ."""
    def __init__(self,*args,**kwargs):
        # Forward all arguments (point, angle, mass, ...) unchanged.
        super().__init__(*args,**kwargs)
if __name__=="__main__":
from .surface import Surface
surface=Surface()
line=MaterialLine()
print(MaterialLine.null)
|
[
"marc.partensky@gmail.com"
] |
marc.partensky@gmail.com
|
c961fed5839570f6d3167e4e2845df645059d079
|
2ce950f21909f6c9ed8d58b1dea4a524f90a173d
|
/allmessages/apps.py
|
d209ceccab4ab30591c4792e9ea5b5d9de12062a
|
[] |
no_license
|
denyslipin/chatroom-app
|
2d845c76b4557c14a272e04b699e4de02fd3e70e
|
505fad9eca45000b704f16f197caf03df208bfa8
|
refs/heads/master
| 2023-04-03T00:41:30.589696
| 2021-03-29T19:45:55
| 2021-03-29T19:45:55
| 352,712,227
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 97
|
py
|
from django.apps import AppConfig
class AllmessagesConfig(AppConfig):
    """Django application configuration for the `allmessages` app."""
    name = 'allmessages'
|
[
"den@Macs-MacBook-Pro.local"
] |
den@Macs-MacBook-Pro.local
|
bf7cfe2eeca247650a3cbad4ee46a6f05258f6e0
|
f3a9b9c553a68c63f144129cb16bffbeb842e1db
|
/core/constants.py
|
71a2d123c7ed97be8b08f5e4bff254c87541519c
|
[] |
no_license
|
adwojak/MapGenerator
|
5604742bab40b7a5f7527ab71884df3777d3a484
|
d2ffb0ed1400eae3787a3dabf18241aed1ffd353
|
refs/heads/main
| 2023-06-11T14:12:14.910380
| 2021-07-10T20:43:26
| 2021-07-10T20:43:26
| 371,958,898
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 294
|
py
|
EMPTY_SPACE = "."
STARTING = "STARTING"
EXIT = "EXIT"
EASY_PATH = "EASY_PATH"
UP = "UP"
DOWN = "DOWN"
LEFT = "LEFT"
RIGHT = "RIGHT"
MIDDLE = "MIDDLE"
OPPOSITE_FACES = {UP: DOWN, DOWN: UP, LEFT: RIGHT, RIGHT: LEFT}
ROOM_KINDS = {STARTING: "S", EXIT: "E", EASY_PATH: "P", "EASY_PATH2": "H"}
|
[
"dwojaka207@gmail.com"
] |
dwojaka207@gmail.com
|
0d8b25837532ae72462d9dab6bfd5dac50b518f6
|
026c1b047604bf572a489c916fc2423e007de84e
|
/pfapp/migrations/0004_user_details.py
|
077de4a51d8966ca4de55421517fc4f83ccdb34d
|
[] |
no_license
|
akbarali97/FarmersHub
|
0015e0e107617da359fd6afc0aad36ef8cae0f35
|
2fbff85306964921a9af22ff9d78553d807eba52
|
refs/heads/master
| 2023-08-02T00:33:33.586064
| 2020-06-10T18:34:23
| 2020-06-10T18:34:23
| 249,766,365
| 1
| 1
| null | 2021-09-22T18:58:42
| 2020-03-24T16:57:21
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 635
|
py
|
# Generated by Django 3.0.3 on 2020-04-23 18:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the `user_details` table
    with name, gender and dob columns."""

    dependencies = [
        ('pfapp', '0003_auto_20200418_2036'),
    ]
    operations = [
        migrations.CreateModel(
            name='user_details',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('gender', models.CharField(max_length=30)),
                ('dob', models.DateField(max_length=8)),
            ],
        ),
    ]
|
[
"31400505+akbarali97@users.noreply.github.com"
] |
31400505+akbarali97@users.noreply.github.com
|
cd156b436a035984a81714acebc4b8032ed39fee
|
bfc01b4240baadf0531c5dd7f1b4757a4651782e
|
/complex_data_structures/pythonhashmap.py
|
c60738b12557ec14d62bcb489ceba06ede37437b
|
[] |
no_license
|
jmmander/codeacademy
|
f3bf9ba005fc3577dd9d42e21aafaa831facd816
|
85ea3086117117cfa124caa2838772ad02d5f08d
|
refs/heads/master
| 2022-12-29T13:19:53.736360
| 2020-10-16T18:18:35
| 2020-10-16T18:18:35
| 259,692,170
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,139
|
py
|
class HashMap:
    """Fixed-size hash map using open addressing.

    Collisions are resolved by re-hashing with an incrementing
    collision count (linear probing for this hash function). Keys must
    be strings — they are encoded to bytes and summed to hash.
    """

    def __init__(self, array_size):
        self.array_size = array_size
        # Each slot is either None (empty) or a [key, value] pair.
        self.array = [None for item in range(array_size)]

    def hash(self, key, count_collisions=0):
        """Sum the key's bytes, offset by the collision count."""
        key_bytes = key.encode()
        hash_code = sum(key_bytes)
        return hash_code + count_collisions

    def compressor(self, hash_code):
        """Map a hash code onto a valid array index."""
        return hash_code % self.array_size

    def assign(self, key, value):
        """Insert `key` -> `value`, overwriting any existing entry.

        BUG FIX: probing is now bounded by array_size, so assigning
        into a full map raises instead of looping forever.
        """
        for collisions in range(self.array_size):
            index = self.compressor(self.hash(key, collisions))
            slot = self.array[index]
            if slot is None or slot[0] == key:
                self.array[index] = [key, value]
                return
        raise RuntimeError('HashMap is full; cannot assign key: {!r}'.format(key))

    def retrieve(self, key):
        """Return the value stored for `key`, or None if absent.

        BUG FIX: the original probe loop compared the whole
        [key, value] slot against `key` (always unequal) and probed
        without bound; the comparison now uses the stored key and
        probing stops after array_size attempts.
        """
        for collisions in range(self.array_size):
            index = self.compressor(self.hash(key, collisions))
            slot = self.array[index]
            if slot is None:
                # An empty slot in the probe chain means the key was
                # never stored.
                return None
            if slot[0] == key:
                return slot[1]
        return None
# Demo: store three rock classifications and read them back.
hash_map = HashMap(15)
for rock, kind in (('gabbro', 'igneous'),
                   ('sandstone', 'sedimentary'),
                   ('gneiss', 'metamorphic')):
    hash_map.assign(rock, kind)
for rock in ('gabbro', 'sandstone', 'gneiss'):
    print(hash_map.retrieve(rock))
|
[
"noreply@github.com"
] |
jmmander.noreply@github.com
|
db8653c53cd6692d5ae90a913a5b37d457020761
|
a82c25a49527f48fa16f2a5303d68458ff10bdf6
|
/wagtail/mywebsite/mywebsite/blog/migrations/0002_blogpage.py
|
45b40dac6c51fcf081ffcec20a8bb1d13140d9e8
|
[] |
no_license
|
vdiquez/python_practice
|
c5159c4ea1f8b1e667e09191198dbea3d3fbb11a
|
ee137e6ff1775340ec3fa6e80fb26bb68564883a
|
refs/heads/main
| 2023-03-14T06:17:22.694692
| 2021-03-01T12:09:11
| 2021-03-01T12:09:11
| 337,362,935
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 955
|
py
|
# Generated by Django 3.1.5 on 2021-02-09 19:36
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.fields
class Migration(migrations.Migration):
    """Auto-generated migration: creates the BlogPage model as a
    subclass of the Wagtail Page (date, intro, rich-text body)."""

    dependencies = [
        ('wagtailcore', '0059_apply_collection_ordering'),
        ('blog', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='BlogPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.page')),
                ('date', models.DateField(verbose_name='Post date')),
                ('intro', models.CharField(max_length=250)),
                ('body', wagtail.core.fields.RichTextField(blank=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
    ]
|
[
"victor.ocanto@grupodiusframi.com.co"
] |
victor.ocanto@grupodiusframi.com.co
|
5272bf043cf544758d30293e9dba82ea6b67d7d7
|
521c1beeb2776161ae6d550be35cd0c180887129
|
/elvis/Web_selenium/test_selenium/test_gps_cloud.py
|
73ba994f28f98f184e079b285b9e2ff4a64f1768
|
[] |
no_license
|
elvis2workspace/CustomLibrary
|
601b552792ac2c33beeb709474f857c82793ac7e
|
6449eea8aa99ca1172f54b669d97703d36132ce3
|
refs/heads/master
| 2021-01-23T21:33:05.617871
| 2017-09-26T01:57:48
| 2017-09-26T01:57:48
| 58,983,388
| 0
| 1
| null | 2016-12-06T09:56:14
| 2016-05-17T02:22:14
|
Python
|
UTF-8
|
Python
| false
| false
| 5,906
|
py
|
# -*- coding: utf-8 -*-
import time
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from elvis.utils.os_opt.file_action import *
class Gps_Cloud(unittest.TestCase):
    def setUp(self):
        """Start a Chrome session and record the GPS-cloud login URL."""
        self.driver = webdriver.Chrome()
        # Wait up to 30s for elements to appear before failing lookups.
        self.driver.implicitly_wait(30)
        self.login_url = "http://gps.dev-ag.56qq.com/login.html"
        self.verificationErrors = []
        self.accept_next_alert = True
# def test_gpscloud_login(self):
# u"""gps cloud login"""
#
# driver = self.driver
# driver.get(self.base_url)
# try:
# driver.find_element_by_name("userName").send_keys("gpstest")
# pwd_elem = driver.find_element_by_id("JS_password")
# pwd_elem.send_keys("123123")
# pwd_elem.send_keys(Keys.ENTER)
# time.sleep(5)
# except:
# driver.get_screenshot_as_file("D:\\selenium_use_case\\error_png\\kw.png") # 如未找到元素就截取当前页
#
# finally:
# driver.close()
# def test_gps_car(self):
# driver = self.driver
# driver.get(self.login_url)
# driver.maximize_window()
# try:
# # LOGIN GPS CLOUD PLATFORM
# driver.find_element_by_name("userName").send_keys("gpstest")
# pwd_elem = driver.find_element_by_id("JS_password")
# pwd_elem.send_keys("123123")
# pwd_elem.send_keys(Keys.ENTER)
# time.sleep(5)
#
# nav_carManage = driver.find_element_by_xpath("//*[@id=\"nav_carManage\"]")
#
# # rtn = driver.execute_script('arguments[0].click()', more)
# chain = ActionChains(driver)
# chain.move_to_element(nav_carManage).perform()
# driver.find_element_by_xpath("//*[@id=\"subnav_carManage\"]/li[1]").click()
#
# except:
# driver.get_screenshot_as_file("D:\\kw.png") # 如未找到元素就截取当前页
# # finally:
# # # driver.close()
# # pass
def test_gps_car_hide(self):
driver = self.driver
driver.get(self.login_url)
driver.maximize_window()
try:
# LOGIN GPS CLOUD PLATFORM
driver.find_element_by_name("userName").send_keys("gpstest")
pwd_elem = driver.find_element_by_id("JS_password")
pwd_elem.send_keys("123123")
pwd_elem.send_keys(Keys.ENTER)
time.sleep(5)
nav_carManage = driver.find_element_by_xpath("//*[@id=\"nav_carManage\"]")
# rtn = driver.execute_script('arguments[0].click()', more)
js = "var q=document.getElementById('subnav_carManage');q.style.display='block';"
# setAttribute(\"style\",\"display:block\
driver.execute_script(js)
driver.find_element_by_css_selector("#subnav_carManage > li:nth-child(1)").click()
time.sleep(5)
# 将页面滚动条拖到底部
# js = "var q=document.documentElement.scrollTop|| window.pageYOffset || document.body.scrollTop; q=10000;"
js = "window.scrollTo(0, document.body.scrollHeight)"
driver.execute_script(js)
time.sleep(3)
print "page bottom"
# # 移动到元素element对象的“顶端”与当前窗口的“顶部”对齐
#
# driver.execute_script("arguments[0].scrollIntoView();", element)
#
# driver.execute_script("arguments[0].scrollIntoView(true);", element)
#
# # 移动到元素element对象的“底端”与当前窗口的“底部”对齐
#
# driver.execute_script("arguments[0].scrollIntoView(false);", element)
#
# # 移动到页面最底部
#
# driver.execute_script("window.scrollTo(0, document.body.scrollHeight)")
#
# # 移动到指定的坐标(相对当前的坐标移动)
#
# driver.execute_script("window.scrollBy(0, 700)")
# # Thread.sleep(3000)
# # 结合上面的scrollBy语句,相当于移动到700 + 800 = 1600像素位置
#
# driver.execute_script("window.scrollBy(0, 800)")
#
# # 移动到窗口绝对位置坐标,如下移动到纵坐标1600像素位置
#
# driver.execute_script("window.scrollTo(0, 1600)")
# # Thread.sleep(3000);
# # 结合上面的scrollTo语句,仍然移动到纵坐标1200像素位置
#
# driver.execute_script("window.scrollTo(0, 1200)")
#
# # 将滚动条移动到页面的顶部
# js = "var q=document.documentElement.scrollTop=0"
# driver.execute_script(js)
# time.sleep(3)
# print "page top"
# chains = ActionChains(driver)
# chains.send_keys(Keys.PAGE_DOWN).perform()
# print "bottom."
driver.find_element_by_id("carList_export").click()
# driver.find_element_by_xpath("//*[@id=\"subnav_carManage\"]/li[1]").click()
check_file(download_path, u"车辆基础信息-20161027101240.xls")
except:
driver.get_screenshot_as_file("D:\\kw.png") # 如未找到元素就截取当前页
# finally:
# # driver.close()
# pass
def tearDown(self):
# self.driver.quit
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
    # Build an explicit suite so only the selected case runs
    # (further cases can be appended to this list).
    selected_cases = [Gps_Cloud("test_gps_car_hide")]
    runner = unittest.TextTestRunner()
    runner.run(unittest.TestSuite(selected_cases))
|
[
"xiuhai5052@hotmail.com"
] |
xiuhai5052@hotmail.com
|
60def32aa20ef5dc414bd621bb9d1b9d5c1d1bc5
|
be8e3c729972b429e5e2ccfd050393d70a3b26b3
|
/Combine/dom-analysis.py
|
777bacbc1652ab043e4806c567af84fa999b4612
|
[] |
no_license
|
sofwerx/safehouse-data-transformations
|
30358f0423d5b542ce514d1ce532c5778ada822c
|
602faf8788df6f8e4f9b8f9eae5d069570be22eb
|
refs/heads/master
| 2021-04-12T08:13:31.420624
| 2018-04-23T17:12:16
| 2018-04-23T17:12:16
| 126,072,300
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,699
|
py
|
# coding: utf-8
"""Exploratory analysis of a Domoticz sensor data export.

Originally a Jupyter-notebook export; guarded so it also runs under plain
Python.
"""

import pandas as pd
from summarizeDataFrame import summarizeDataset
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from pandas.plotting import scatter_matrix
import sys

# Name of the CSV export to analyse.
datafile = "domoticz.csv"

# The 'matplotlib inline' magic only exists inside IPython/Jupyter; guard it
# so the script does not crash with a NameError under plain Python.
try:
    get_ipython().magic(u'matplotlib inline')
except NameError:
    pass

# Load the data and parse the timestamp column.
# NOTE(review): assumes the CSV has a 'datetime' column — confirm the export schema.
df = pd.read_csv(datafile)
df['datetime'] = pd.to_datetime(df['datetime'])

# Restrict to the window of interest.
# df1 = df[(df['datetime'] > '2018-03-01') & (df['datetime'] < '2013-03-30')]
df1 = df[(df['datetime'] > '2018-03-26')]

# Remove attributes with zero variance (assuming datetime is not unique).
df2 = df1.loc[:, df1.apply(pd.Series.nunique) != 1]
del df1

# Separate numerical and categorical variables.
# Bug fix: these definitions were commented out although `categorical`,
# `numerical` and `df2` are used below, which raised a NameError at runtime.
numerics = ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']
numerical = df2.select_dtypes(numerics)
categorical = df2.drop(list(df2.select_dtypes(numerics)), axis=1)

# Data understanding: view a summary of both halves of the dataset.
print("\n" + "Categorical Data Summary")
summarizeDataset(categorical)
print("Numerical Data Summary")
summarizeDataset(numerical)

# `display` is an IPython builtin; fall back to print outside notebooks.
try:
    display(df2)
except NameError:
    print(df2)
# df2.to_csv("test.csv", index=False)
|
[
"david.salvador@sofwerx.org"
] |
david.salvador@sofwerx.org
|
f16efb496479cb9615556bdaf695e3a72fd48ec9
|
fd2b8a1e64f9d61ef4312ef6416c0ad257b13a32
|
/Scripts/tkinter_tutorial.py
|
ee59f1908d87f621e00f59c8dbcd93580f68a15f
|
[] |
no_license
|
David-Fde/Commander
|
09414fa7120ec5e9bcff4e242784937a77e30973
|
079c8f3bc0f4f9d16cd0282af4a6f478d2f97685
|
refs/heads/main
| 2023-06-15T18:31:13.145142
| 2021-04-14T09:05:37
| 2021-04-14T09:05:37
| 322,359,350
| 0
| 0
| null | 2021-04-14T09:05:37
| 2020-12-17T17:00:39
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 499
|
py
|
import tkinter

window = tkinter.Tk()
# Rename the title of the window.
window.title("GUI")

# A widget only becomes visible once it is placed with pack/grid/place;
# the original created the label but never gridded it.
label = tkinter.Label(window, text="Hello World!", font=("Arial Bold", 20))
label.grid(column=0, row=1, columnspan=3)

txt = tkinter.Entry(window, width=10)
txt.grid(column=1, row=0)


def clicked():
    """Update the greeting label with the entry's contents."""
    res = "Welcome to " + txt.get()
    # Bug fix: configure the label *instance*. The original called
    # tkinter.Label.configure(text=res) on the class itself, which raises
    # TypeError (missing self) when the button is clicked.
    label.configure(text=res)


bt = tkinter.Button(window, text="Enter", command=clicked)
bt.grid(column=2, row=0)

window.geometry("350x200")
window.mainloop()
|
[
"fernandezvazquezd@gmail.com"
] |
fernandezvazquezd@gmail.com
|
b40a54eaab98089552b8f3ec7360ea33a89a29f5
|
d80ab47432a3b852b9426c5d03a549a7462e40aa
|
/pytorch3d/renderer/mesh/clip.py
|
d35e1a9a60e501cd5cf073e04bbdd403de39992c
|
[
"BSD-3-Clause"
] |
permissive
|
filsv/pytorch3d
|
3b37ca30f1c901f0286fea1056a2caa4d0c7e612
|
b8790474f16994d75e6cf64447080f1b52bcc292
|
refs/heads/master
| 2023-06-13T07:26:34.317775
| 2021-06-28T13:30:27
| 2021-06-28T13:31:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 34,814
|
py
|
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, List, Optional, Tuple
import torch
"""
Mesh clipping is done before rasterization and is implemented using 4 cases
(these will be referred to throughout the functions below)
Case 1: the triangle is completely in front of the clipping plane (it is left
unchanged)
Case 2: the triangle is completely behind the clipping plane (it is culled)
Case 3: the triangle has exactly two vertices behind the clipping plane (it is
clipped into a smaller triangle)
Case 4: the triangle has exactly one vertex behind the clipping plane (it is clipped
into a smaller quadrilateral and divided into two triangular faces)
After rasterization, the Fragments from the clipped/modified triangles
are mapped back to the triangles in the original mesh. The indices,
barycentric coordinates and distances are all relative to original mesh triangles.
NOTE: It is assumed that all z-coordinates are in world coordinates (not NDC
coordinates), while x and y coordinates may be in NDC/screen coordinates
(i.e after applying a projective transform e.g. cameras.transform_points(points)).
"""
class ClippedFaces:
    """
    Bundle holding the clipped counterpart of a Meshes object
    (face_verts, mesh_to_face_first_idx, num_faces_per_mesh) together with the
    bookkeeping tensors needed to translate rasterization results on the
    clipped faces back onto the original (unclipped) faces.

    Args:
        face_verts: FloatTensor of shape (F_clipped, 3, 3) with the vertex
            positions of every clipped face.
        mesh_to_face_first_idx: (N,) tensor (N = number of meshes in the
            batch); element i is the index into face_verts of the first face
            of mesh i.
        num_faces_per_mesh: (N,) tensor holding the face count of each mesh.
        faces_clipped_to_unclipped_idx: (F_clipped,) LongTensor mapping each
            clipped face back to its source face in the packed unclipped faces
            (i.e. the faces obtained via meshes.faces_packed()).
        barycentric_conversion: (T, 3, 3) FloatTensor where
            barycentric_conversion[i, :, k] stores the barycentric weights, in
            terms of the world coordinates of the original (big) unclipped
            triangle, of the kth vertex of clipped (small) triangle i.  Given
            clipped barycentric coords alpha_clipped[i, :] from the rasterizer,
            alpha_unclipped[i, :] = barycentric_conversion[i, :, :] * alpha_clipped[i, :].
        faces_clipped_to_conversion_idx: (F_clipped,) LongTensor giving the row
            of barycentric_conversion applicable to each clipped face, or -1
            when no conversion is needed.
        clipped_faces_neighbor_idx: (F_clipped,) LongTensor; for a case 4 face
            f split into two triangles (t1, t2):
            clipped_faces_neighbor_idx[t1_idx] = t2_idx.  Faces that are not
            split (cases 1/2/3) are set to -1.
    """

    __slots__ = [
        "face_verts",
        "mesh_to_face_first_idx",
        "num_faces_per_mesh",
        "faces_clipped_to_unclipped_idx",
        "barycentric_conversion",
        "faces_clipped_to_conversion_idx",
        "clipped_faces_neighbor_idx",
    ]

    def __init__(
        self,
        face_verts: torch.Tensor,
        mesh_to_face_first_idx: torch.Tensor,
        num_faces_per_mesh: torch.Tensor,
        faces_clipped_to_unclipped_idx: Optional[torch.Tensor] = None,
        barycentric_conversion: Optional[torch.Tensor] = None,
        faces_clipped_to_conversion_idx: Optional[torch.Tensor] = None,
        clipped_faces_neighbor_idx: Optional[torch.Tensor] = None,
    ) -> None:
        # __slots__ lists the fields in constructor-argument order, so bind
        # them pairwise.
        values = (
            face_verts,
            mesh_to_face_first_idx,
            num_faces_per_mesh,
            faces_clipped_to_unclipped_idx,
            barycentric_conversion,
            faces_clipped_to_conversion_idx,
            clipped_faces_neighbor_idx,
        )
        for name, value in zip(self.__slots__, values):
            setattr(self, name, value)
class ClipFrustum:
    """
    Description of a view frustum (left, right, top, bottom, znear, zfar) used
    to cull or clip triangles.  Any extent left as None disables culling along
    that axis.  The perspective_correct, cull and z_clip_value parameters
    control how triangles are clipped against the frustum.

    Args:
        left: NDC coordinate of the left clipping plane (along the x axis)
        right: NDC coordinate of the right clipping plane (along the x axis)
        top: NDC coordinate of the top clipping plane (along the y axis)
        bottom: NDC coordinate of the bottom clipping plane (along the y axis)
        znear: world-space z coordinate of the near clipping plane
        zfar: world-space z coordinate of the far clipping plane
        perspective_correct: set to True when a perspective camera is used
        cull: when True, triangles fully outside the frustum are culled
        z_clip_value: when not None, triangles are clipped (possibly split into
            smaller triangles) so that z >= z_clip_value, avoiding projections
            that diverge as z -> 0
    """

    __slots__ = [
        "left",
        "right",
        "top",
        "bottom",
        "znear",
        "zfar",
        "perspective_correct",
        "cull",
        "z_clip_value",
    ]

    def __init__(
        self,
        left: Optional[float] = None,
        right: Optional[float] = None,
        top: Optional[float] = None,
        bottom: Optional[float] = None,
        znear: Optional[float] = None,
        zfar: Optional[float] = None,
        perspective_correct: bool = False,
        cull: bool = True,
        z_clip_value: Optional[float] = None,
    ) -> None:
        # __slots__ lists the fields in constructor-argument order; assign pairwise.
        values = (
            left,
            right,
            top,
            bottom,
            znear,
            zfar,
            perspective_correct,
            cull,
            z_clip_value,
        )
        for name, value in zip(self.__slots__, values):
            setattr(self, name, value)
def _get_culled_faces(face_verts: torch.Tensor, frustum: "ClipFrustum") -> torch.Tensor:
    """
    Helper function used to find all the faces in Meshes which are
    fully outside the view frustum. A face is culled if all 3 vertices are outside
    the same axis of the view frustum.

    Args:
        face_verts: An (F, 3, 3) tensor, where F is the number of faces in
            the packed representation of Meshes. The 2nd dimension represents the
            3 vertices of a triangle, and the 3rd dimension stores the xyz locations
            of each vertex.
        frustum: An instance of the ClipFrustum class with the information on the
            position of the clipping planes (planes set to None are skipped).

    Returns:
        faces_culled: A boolean tensor of size F specifying whether or not each face
        should be culled.
    """
    clipping_planes = (
        (frustum.left, 0, "<"),
        (frustum.right, 0, ">"),
        (frustum.top, 1, "<"),
        (frustum.bottom, 1, ">"),
        (frustum.znear, 2, "<"),
        (frustum.zfar, 2, ">"),
    )
    faces_culled = torch.zeros(
        [face_verts.shape[0]], dtype=torch.bool, device=face_verts.device
    )
    for plane in clipping_planes:
        clip_value, axis, op = plane
        # If clip_value is None then don't clip along that plane.
        if frustum.cull and clip_value is not None:
            # Bug fix: compare the `axis` coordinate of ALL 3 vertices
            # (face_verts[:, :, axis], shape (F, 3)) against the plane.  The
            # previous face_verts[:, axis] selected vertex number `axis` and
            # compared its xyz coordinates instead, which does not implement
            # "all 3 vertices outside this plane".
            if op == "<":
                verts_clipped = face_verts[:, :, axis] < clip_value
            else:
                verts_clipped = face_verts[:, :, axis] > clip_value
            # The face is outside the frustum iff every vertex is clipped.
            faces_culled |= verts_clipped.sum(1) == 3
    return faces_culled
def _find_verts_intersecting_clipping_plane(
    face_verts: torch.Tensor,
    p1_face_ind: torch.Tensor,
    clip_value: float,
    perspective_correct: bool,
) -> Tuple[Tuple[Any, Any, Any, Any, Any], List[Any]]:
    r"""
    Find where the edges of case 3 / case 4 triangles cross the clipping plane.

    For each triangle already known to straddle the plane, vertex p1 is the
    lone vertex on one side; p2 and p3 (in winding order) lie on the other
    side.  The edges (p1, p2) and (p1, p3) intersect the plane at p4 and p5:

                          p1
                          /\
                         /  \
                        / t  \
        _____________p4/______\p5__________ clip_value
                      /        \
                     /____      \
                  p2      ---____\p3

    Args:
        face_verts: (F, 3, 3) tensor of triangle vertices; z must be in world
            coordinates while xy may be in NDC/screen coordinates (after a
            projective transform).
        p1_face_ind: (F,) tensor with values in {0, 1, 2}; the index within
            each triangle of the vertex that sits alone on its side of the
            clipping plane.
        clip_value: z-value of the clipping plane.
        perspective_correct: True when a perspective camera was used and the
            xy-coordinates of face_verts are NDC/screen coordinates.

    Returns:
        A 2-tuple:
        p: (p1, p2, p3, p4, p5), each an (F, 3) tensor of xyz positions.
        p_barycentric: (p1_bary, ..., p5_bary), each an (F, 3) tensor of
            barycentric weights expressing the point in terms of the original
            unclipped triangle.
    """
    num_tris = face_verts.shape[0]
    device = face_verts.device

    # Indices of the two remaining vertex slots, preserving the original
    # (clockwise or counterclockwise) winding direction.
    p2_face_ind = (p1_face_ind + 1) % 3
    p3_face_ind = (p1_face_ind + 2) % 3

    # Per-triangle vertex coordinates, shape (num_tris, 3) each.
    tri_idx = torch.arange(num_tris, device=device)
    p1 = face_verts[tri_idx, p1_face_ind]
    p2 = face_verts[tri_idx, p2_face_ind]
    p3 = face_verts[tri_idx, p3_face_ind]

    def _edge_plane_intersection(pb: torch.Tensor, w: torch.Tensor) -> torch.Tensor:
        # Interpolate p1 -> pb with weight w; by construction of w the
        # interpolated z equals clip_value exactly.
        w_col = w[:, None]
        point = p1 * (1 - w_col) + pb * w_col
        if perspective_correct:
            # xy are post-projection coordinates: undo the perspective divide
            # (multiply by the world z), interpolate in world space, then
            # re-project by dividing by the new depth, which is clip_value.
            a_xy = p1[:, :2] * p1[:, 2:3]
            b_xy = pb[:, :2] * pb[:, 2:3]
            point[:, :2] = (a_xy * (1 - w_col) + b_xy * w_col) / clip_value
        return point

    # Weight along (p1, p2) solving p1.z*(1-w2) + p2.z*w2 = clip_value.
    w2 = (p1[:, 2] - clip_value) / (p1[:, 2] - p2[:, 2])
    p4 = _edge_plane_intersection(p2, w2)

    # Same along (p1, p3).  NOTE(review): the original detaches w3 (but not w2)
    # from the autograd graph; that asymmetry is preserved here as-is.
    w3 = ((p1[:, 2] - clip_value) / (p1[:, 2] - p3[:, 2])).detach()
    p5 = _edge_plane_intersection(p3, w3)

    # Barycentric weights of p1..p5 with respect to the original triangle.
    p_barycentric = [torch.zeros((num_tris, 3), device=device) for _ in range(5)]
    p_barycentric[0][tri_idx, p1_face_ind] = 1
    p_barycentric[1][tri_idx, p2_face_ind] = 1
    p_barycentric[2][tri_idx, p3_face_ind] = 1
    p_barycentric[3][tri_idx, p1_face_ind] = 1 - w2
    p_barycentric[3][tri_idx, p2_face_ind] = w2
    p_barycentric[4][tri_idx, p1_face_ind] = 1 - w3
    p_barycentric[4][tri_idx, p3_face_ind] = w3

    return (p1, p2, p3, p4, p5), p_barycentric
###################
# Main Entry point
###################
def clip_faces(
    face_verts_unclipped: torch.Tensor,
    mesh_to_face_first_idx: torch.Tensor,
    num_faces_per_mesh: torch.Tensor,
    frustum: ClipFrustum,
) -> ClippedFaces:
    """
    Clip a mesh to the portion contained within a view frustum and with z > z_clip_value.

    There are two types of clipping:
      1) Cull triangles that are completely outside the view frustum. This is purely
         to save computation by reducing the number of triangles that need to be
         rasterized.
      2) Clip triangles into the portion of the triangle where z > z_clip_value. The
         clipped region may be a quadrilateral, which results in splitting a triangle
         into two triangles. This does not save computation, but is necessary to
         correctly rasterize using perspective cameras for triangles that pass through
         z <= 0, because NDC/screen coordinates go to infinity at z=0.

    Args:
        face_verts_unclipped: An (F, 3, 3) tensor, where F is the number of faces in
            the packed representation of Meshes, the 2nd dimension represents the 3
            vertices of the triangle, and the 3rd dimension stores the xyz locations
            of each vertex. The z-coordinates must be represented in world
            coordinates, while the xy-coordinates may be in NDC/screen coordinates.
        mesh_to_face_first_idx: a tensor of shape (N,), where N is the number of
            meshes in the batch. The ith element stores the index into
            face_verts_unclipped of the first face of the ith mesh.
        num_faces_per_mesh: a tensor of shape (N,) storing the number of faces in
            each mesh.
        frustum: a ClipFrustum object defining the frustum used to cull faces.

    Returns:
        clipped_faces: ClippedFaces object storing a clipped version of the Meshes
            along with tensors that can be used to convert barycentric coordinates
            returned by rasterization of the clipped meshes into barycentric
            coordinates for the unclipped meshes.
    """
    F = face_verts_unclipped.shape[0]
    device = face_verts_unclipped.device

    # Triangles completely outside the view frustum will be culled.
    # faces_culled is of shape (F,)
    faces_culled = _get_culled_faces(face_verts_unclipped, frustum)

    # Triangles that are partially behind the z clipping plane will be clipped to
    # smaller triangles.
    z_clip_value = frustum.z_clip_value
    perspective_correct = frustum.perspective_correct
    if z_clip_value is not None:
        # (F, 3) tensor (where F is the number of triangles) marking whether each
        # vertex in a triangle is behind the clipping plane.
        faces_clipped_verts = face_verts_unclipped[:, :, 2] < z_clip_value
        # (F,) dim tensor containing the number of clipped vertices in each triangle.
        faces_num_clipped_verts = faces_clipped_verts.sum(1)
    else:
        faces_num_clipped_verts = torch.zeros([F], device=device)

    # If no triangles need to be clipped or culled, avoid unnecessary computation
    # and return early.
    if faces_num_clipped_verts.sum().item() == 0 and faces_culled.sum().item() == 0:
        return ClippedFaces(
            face_verts=face_verts_unclipped,
            mesh_to_face_first_idx=mesh_to_face_first_idx,
            num_faces_per_mesh=num_faces_per_mesh,
        )

    #####################################################################################
    # Classify faces into the 4 relevant cases:
    #   1) The triangle is completely in front of the clipping plane (it is left
    #      unchanged)
    #   2) The triangle is completely behind the clipping plane (it is culled)
    #   3) The triangle has exactly two vertices behind the clipping plane (it is
    #      clipped into a smaller triangle)
    #   4) The triangle has exactly one vertex behind the clipping plane (it is clipped
    #      into a smaller quadrilateral and split into two triangles)
    #####################################################################################
    # pyre-ignore[16]:
    faces_unculled = ~faces_culled
    # Case 1: no clipped verts or culled faces.
    cases1_unclipped = (faces_num_clipped_verts == 0) & faces_unculled
    case1_unclipped_idx = cases1_unclipped.nonzero(as_tuple=True)[0]
    # Case 2: all verts clipped.
    case2_unclipped = (faces_num_clipped_verts == 3) | faces_culled
    # Case 3: two verts clipped.
    case3_unclipped = (faces_num_clipped_verts == 2) & faces_unculled
    case3_unclipped_idx = case3_unclipped.nonzero(as_tuple=True)[0]
    # Case 4: one vert clipped.
    case4_unclipped = (faces_num_clipped_verts == 1) & faces_unculled
    case4_unclipped_idx = case4_unclipped.nonzero(as_tuple=True)[0]

    # faces_unclipped_to_clipped_idx is an (F,) dim tensor storing the index of each
    # face to the corresponding face in face_verts_clipped.
    # Each case 2 triangle will be culled (deleted from face_verts_clipped),
    # while each case 4 triangle will be split into two smaller triangles
    # (replaced by two consecutive triangles in face_verts_clipped).
    # case2_unclipped is an (F,) dim 0/1 tensor of all the case2 faces.
    # case4_unclipped is an (F,) dim 0/1 tensor of all the case4 faces.
    faces_delta = case4_unclipped.int() - case2_unclipped.int()
    # faces_delta_cum gives the per face change in index. Faces which are
    # clipped in the original mesh are mapped to the closest non clipped face
    # in face_verts_clipped (this doesn't matter as they are not used
    # during rasterization anyway).
    faces_delta_cum = faces_delta.cumsum(0) - faces_delta
    # delta = 1 + (1 for each split face) - (1 for each culled face); its exclusive
    # cumsum therefore yields the output slot of every input face.
    delta = 1 + case4_unclipped.int() - case2_unclipped.int()
    # pyre-ignore[16]
    faces_unclipped_to_clipped_idx = delta.cumsum(0) - delta

    ###########################################
    # Allocate tensors for the output Meshes.
    # These will then be filled in for each case.
    ###########################################
    F_clipped = (
        F + faces_delta_cum[-1].item() + faces_delta[-1].item()
    )  # Total number of faces in the new Meshes

    face_verts_clipped = torch.zeros(
        (F_clipped, 3, 3), dtype=face_verts_unclipped.dtype, device=device
    )
    faces_clipped_to_unclipped_idx = torch.zeros(
        [F_clipped], dtype=torch.int64, device=device
    )

    # Updated versions of mesh_to_face_first_idx and num_faces_per_mesh applicable
    # to face_verts_clipped.
    mesh_to_face_first_idx_clipped = faces_unclipped_to_clipped_idx[
        mesh_to_face_first_idx
    ]
    F_clipped_t = torch.full([1], F_clipped, dtype=torch.int64, device=device)
    num_faces_next = torch.cat((mesh_to_face_first_idx_clipped[1:], F_clipped_t))
    num_faces_per_mesh_clipped = num_faces_next - mesh_to_face_first_idx_clipped

    ################# Start Case 1 ########################################
    # Case 1: Triangles are fully visible, copy unchanged triangles into the
    # appropriate position in the new list of faces.
    case1_clipped_idx = faces_unclipped_to_clipped_idx[case1_unclipped_idx]
    face_verts_clipped[case1_clipped_idx] = face_verts_unclipped[case1_unclipped_idx]
    faces_clipped_to_unclipped_idx[case1_clipped_idx] = case1_unclipped_idx

    # If no triangles need to be clipped but some triangles were culled, avoid
    # unnecessary clipping computation.
    if case3_unclipped_idx.shape[0] + case4_unclipped_idx.shape[0] == 0:
        return ClippedFaces(
            face_verts=face_verts_clipped,
            mesh_to_face_first_idx=mesh_to_face_first_idx_clipped,
            num_faces_per_mesh=num_faces_per_mesh_clipped,
            faces_clipped_to_unclipped_idx=faces_clipped_to_unclipped_idx,
        )
    ################# End Case 1 ##########################################

    ################# Start Case 3 ########################################
    # Case 3: exactly two vertices are behind the camera, clipping the triangle into a
    # triangle. In the diagram below, we clip the bottom part of the triangle, and add
    # new vertices p4 and p5 by intersecting with the clipping plane. The updated
    # triangle is the triangle between p4, p1, p5.
    #
    #                        p1 (unclipped vertex)
    #                        /\
    #                       /  \
    #                      / t  \
    #       _____________p4/______\p5__________ clip_value
    #       xxxxxxxxxxxxxx/        \xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
    #       xxxxxxxxxxxxx/____      \xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
    #       xxxxxxxxxx p2 xxxx---____\p3 xxxxxxxxxxxxxxxxxxxxxxxxxxx
    #       xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
    faces_case3 = face_verts_unclipped[case3_unclipped_idx]
    # Index (0, 1, or 2) of the vertex in front of the clipping plane.
    p1_face_ind = torch.where(~faces_clipped_verts[case3_unclipped_idx])[1]
    # Solve for the points p4, p5 that intersect the clipping plane.
    p, p_barycentric = _find_verts_intersecting_clipping_plane(
        faces_case3, p1_face_ind, z_clip_value, perspective_correct
    )
    p1, _, _, p4, p5 = p
    p1_barycentric, _, _, p4_barycentric, p5_barycentric = p_barycentric

    # Store the clipped triangle.
    case3_clipped_idx = faces_unclipped_to_clipped_idx[case3_unclipped_idx]
    t_barycentric = torch.stack((p4_barycentric, p5_barycentric, p1_barycentric), 2)
    face_verts_clipped[case3_clipped_idx] = torch.stack((p4, p5, p1), 1)
    faces_clipped_to_unclipped_idx[case3_clipped_idx] = case3_unclipped_idx
    ################# End Case 3 ##########################################

    ################# Start Case 4 ########################################
    # Case 4: exactly one vertex is behind the camera, clip the triangle into a
    # quadrilateral. In the diagram below, we clip the bottom part of the triangle,
    # and add new vertices p4 and p5 by intersecting with the clipping plane. The
    # unclipped region is a quadrilateral, which is split into two triangles:
    #   t1: p4, p2, p5
    #   t2: p5, p2, p3
    #
    #          p3_____________________p2
    #            \                __--/
    #             \      t2   __--   /
    #              \      __--   t1 /
    #  ______________p5\__--_________/p4_________clip_value
    #  xxxxxxxxxxxxxxxxx\           /xxxxxxxxxxxxxxxxxx
    #  xxxxxxxxxxxxxxxxxx\         /xxxxxxxxxxxxxxxxxxx
    #  xxxxxxxxxxxxxxxxxxx\       /xxxxxxxxxxxxxxxxxxxx
    #  xxxxxxxxxxxxxxxxxxxx\     /xxxxxxxxxxxxxxxxxxxxx
    #  xxxxxxxxxxxxxxxxxxxxx\   /xxxxxxxxxxxxxxxxxxxxx
    #  xxxxxxxxxxxxxxxxxxxxxx\ /xxxxxxxxxxxxxxxxxxxxx
    #                          p1 (clipped vertex)
    faces_case4 = face_verts_unclipped[case4_unclipped_idx]
    # Index (0, 1, or 2) of the vertex behind the clipping plane.
    p1_face_ind = torch.where(faces_clipped_verts[case4_unclipped_idx])[1]
    # Solve for the points p4, p5 that intersect the clipping plane.
    p, p_barycentric = _find_verts_intersecting_clipping_plane(
        faces_case4, p1_face_ind, z_clip_value, perspective_correct
    )
    _, p2, p3, p4, p5 = p
    _, p2_barycentric, p3_barycentric, p4_barycentric, p5_barycentric = p_barycentric

    # Store the two clipped triangles (t1, t2) in consecutive output slots.
    case4_clipped_idx = faces_unclipped_to_clipped_idx[case4_unclipped_idx]
    face_verts_clipped[case4_clipped_idx] = torch.stack((p4, p2, p5), 1)
    face_verts_clipped[case4_clipped_idx + 1] = torch.stack((p5, p2, p3), 1)
    t1_barycentric = torch.stack((p4_barycentric, p2_barycentric, p5_barycentric), 2)
    t2_barycentric = torch.stack((p5_barycentric, p2_barycentric, p3_barycentric), 2)
    faces_clipped_to_unclipped_idx[case4_clipped_idx] = case4_unclipped_idx
    faces_clipped_to_unclipped_idx[case4_clipped_idx + 1] = case4_unclipped_idx
    ##################### End Case 4 #########################

    # Triangles that were clipped (case 3 & case 4) will require conversion of
    # barycentric coordinates from being in terms of the smaller clipped triangle to
    # in terms of the original big triangle. If there are T clipped triangles,
    # barycentric_conversion is a (T, 3, 3) tensor, where barycentric_conversion[i, :, k]
    # stores the barycentric weights in terms of the world coordinates of the original
    # (big) triangle for the kth vertex in the clipped (small) triangle. If our
    # rasterizer then expresses some NDC coordinate in terms of barycentric
    # world coordinates for the clipped (small) triangle as alpha_clipped[i,:],
    # alpha_unclipped[i, :] = barycentric_conversion[i, :, :]*alpha_clipped[i, :]
    barycentric_conversion = torch.cat((t_barycentric, t1_barycentric, t2_barycentric))

    # faces_clipped_to_conversion_idx is an (F_clipped,) shape tensor mapping each
    # output face to the applicable row of barycentric_conversion (or set to -1 if
    # conversion is not needed).
    faces_to_convert_idx = torch.cat(
        (case3_clipped_idx, case4_clipped_idx, case4_clipped_idx + 1), 0
    )
    barycentric_idx = torch.arange(
        barycentric_conversion.shape[0], dtype=torch.int64, device=device
    )
    faces_clipped_to_conversion_idx = torch.full(
        [F_clipped], -1, dtype=torch.int64, device=device
    )
    faces_clipped_to_conversion_idx[faces_to_convert_idx] = barycentric_idx

    # clipped_faces_neighbor_idx is an (F_clipped,) dim tensor.
    # For case 4 clipped triangles (where a big triangle is split in two smaller
    # triangles), store the index of the neighboring clipped triangle.
    # This will be needed because if the soft rasterizer includes both
    # triangles in the list of top K nearest triangles, we
    # should only use the one with the smaller distance.
    clipped_faces_neighbor_idx = torch.full(
        [F_clipped], -1, dtype=torch.int64, device=device
    )
    clipped_faces_neighbor_idx[case4_clipped_idx] = case4_clipped_idx + 1
    clipped_faces_neighbor_idx[case4_clipped_idx + 1] = case4_clipped_idx

    clipped_faces = ClippedFaces(
        face_verts=face_verts_clipped,
        mesh_to_face_first_idx=mesh_to_face_first_idx_clipped,
        num_faces_per_mesh=num_faces_per_mesh_clipped,
        faces_clipped_to_unclipped_idx=faces_clipped_to_unclipped_idx,
        barycentric_conversion=barycentric_conversion,
        faces_clipped_to_conversion_idx=faces_clipped_to_conversion_idx,
        clipped_faces_neighbor_idx=clipped_faces_neighbor_idx,
    )
    return clipped_faces
def convert_clipped_rasterization_to_original_faces(
    pix_to_face_clipped, bary_coords_clipped, clipped_faces: ClippedFaces
) -> Tuple[torch.Tensor, torch.Tensor]:
    """
    Convert rasterization Fragments (expressed as pix_to_face_clipped,
    bary_coords_clipped) of clipped Meshes computed using clip_faces()
    to the corresponding rasterization Fragments where barycentric coordinates and
    face indices are in terms of the original unclipped Meshes. The distances are
    handled in the rasterizer C++/CUDA kernels (i.e. for Cases 1/3 the distance
    can be used directly and for Case 4 triangles the distance of the pixel to
    the closest of the two subdivided triangles is used).

    Args:
        pix_to_face_clipped: LongTensor of shape (N, image_size, image_size,
            faces_per_pixel) giving the indices of the nearest faces at each pixel,
            sorted in ascending z-order. Concretely
            ``pix_to_face_clipped[n, y, x, k] = f`` means that ``faces_verts_clipped[f]``
            is the kth closest face (in the z-direction) to pixel (y, x). Pixels that
            are hit by fewer than faces_per_pixel are padded with -1.
        bary_coords_clipped: FloatTensor of shape
            (N, image_size, image_size, faces_per_pixel, 3) giving the barycentric
            coordinates in world coordinates of the nearest faces at each pixel, sorted
            in ascending z-order. Concretely, if ``pix_to_face_clipped[n, y, x, k] = f``
            then ``[w0, w1, w2] = bary_coords_clipped[n, y, x, k]`` gives the
            barycentric coords for pixel (y, x) relative to the face defined by
            ``unproject(face_verts_clipped[f])``. Pixels hit by fewer than
            faces_per_pixel are padded with -1.
        clipped_faces: an instance of ClippedFaces class giving the auxillary variables
            for converting rasterization outputs from clipped to unclipped Meshes.

    Returns:
        2-tuple: (pix_to_face_unclipped, bary_coords_unclipped) that have the same
        definition as (pix_to_face_clipped, bary_coords_clipped) except that they
        pertain to faces_verts_unclipped instead of faces_verts_clipped (i.e the
        original meshes as opposed to the modified meshes).
    """
    faces_clipped_to_unclipped_idx = clipped_faces.faces_clipped_to_unclipped_idx
    # If no clipping then return inputs
    if (
        faces_clipped_to_unclipped_idx is None
        or faces_clipped_to_unclipped_idx.numel() == 0
    ):
        return pix_to_face_clipped, bary_coords_clipped
    device = pix_to_face_clipped.device
    # Convert pix_to_face indices to now refer to the faces in the unclipped Meshes.
    # Init empty tensor to fill in all the background values which have pix_to_face=-1.
    empty = torch.full(pix_to_face_clipped.shape, -1, device=device, dtype=torch.int64)
    pix_to_face_unclipped = torch.where(
        pix_to_face_clipped != -1,
        faces_clipped_to_unclipped_idx[pix_to_face_clipped],
        empty,
    )
    # For triangles that were clipped into smaller triangle(s), convert barycentric
    # coordinates from being in terms of the clipped triangle to being in terms of the
    # original unclipped triangle.
    # barycentric_conversion is a (T, 3, 3) tensor such that
    # alpha_unclipped[i, :] = barycentric_conversion[i, :, :]*alpha_clipped[i, :]
    barycentric_conversion = clipped_faces.barycentric_conversion
    # faces_clipped_to_conversion_idx is an (F_clipped,) shape tensor mapping each output
    # face to the applicable row of barycentric_conversion (or set to -1 if conversion is
    # not needed)
    faces_clipped_to_conversion_idx = clipped_faces.faces_clipped_to_conversion_idx
    if barycentric_conversion is not None:
        # Clone so pixels that need no conversion keep their original coords.
        bary_coords_unclipped = bary_coords_clipped.clone()
        # Select the subset of faces that require conversion, where N is the sum
        # number of case3/case4 triangles that are in the closest k triangles to some
        # rasterized pixel.
        pix_to_conversion_idx = torch.where(
            pix_to_face_clipped != -1,
            faces_clipped_to_conversion_idx[pix_to_face_clipped],
            empty,
        )
        faces_to_convert_mask = pix_to_conversion_idx != -1
        N = faces_to_convert_mask.sum().item()
        # Expand to (N, H, W, K, 3) to be the same shape as barycentric coordinates
        faces_to_convert_mask_expanded = faces_to_convert_mask[:, :, :, :, None].expand(
            -1, -1, -1, -1, 3
        )
        # An (N,) dim tensor of indices into barycentric_conversion
        conversion_idx_subset = pix_to_conversion_idx[faces_to_convert_mask]
        # An (N, 3, 1) tensor of barycentric coordinates in terms of the clipped triangles
        bary_coords_clipped_subset = bary_coords_clipped[faces_to_convert_mask_expanded]
        bary_coords_clipped_subset = bary_coords_clipped_subset.reshape((N, 3, 1))
        # An (N, 3, 3) tensor storing matrices to convert from clipped to unclipped
        # barycentric coordinates
        bary_conversion_subset = barycentric_conversion[conversion_idx_subset]
        # An (N, 3, 1) tensor of barycentric coordinates in terms of the unclipped triangle
        bary_coords_unclipped_subset = bary_conversion_subset.bmm(
            bary_coords_clipped_subset
        )
        bary_coords_unclipped_subset = bary_coords_unclipped_subset.reshape([N * 3])
        # Scatter the converted coords back through the boolean mask.
        bary_coords_unclipped[
            faces_to_convert_mask_expanded
        ] = bary_coords_unclipped_subset
        # dists for case 4 faces will be handled in the rasterizer
        # so no need to modify them here.
    else:
        bary_coords_unclipped = bary_coords_clipped
    return pix_to_face_unclipped, bary_coords_unclipped
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
29743da945b32bb253a1d188218d8518c97ca081
|
b3e385806fd08ecdf4e4f9e57a03593f76986f6d
|
/main.py
|
70dea63c8f25c9d75b85204fc6b0889f453e98db
|
[
"MIT"
] |
permissive
|
AnweshCR7/MelanomaDetection
|
d70f762883a9f35aad4ed4467fc7d50be4dc5caa
|
186707833f9dc88b799c3aa8613e53ef762e3ad1
|
refs/heads/main
| 2023-02-09T05:57:55.652883
| 2021-01-01T16:10:33
| 2021-01-01T16:10:33
| 322,903,614
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,676
|
py
|
import os
import torch
import numpy as np
import pandas as pd
import albumentations
import config
from model import SEResnext50_32x4d
from sklearn import metrics
from dataset import ClassificationDataset
from engine import Engine
from early_stopping import EarlyStopping
import ssl; ssl._create_default_https_context = ssl._create_stdlib_context
def train(fold):
    """Train the SE-ResNeXt50 classifier on all folds except `fold`,
    validating on `fold`, with ReduceLROnPlateau on AUC and early stopping."""
    image_dir = config.DATA_DIR + "train"
    folds_df = pd.read_csv(config.CSV_PATH + "train_folds.csv")
    device = config.DEVICE

    # Hold out the rows belonging to the current fold for validation.
    train_df = folds_df[folds_df.kfold != fold].reset_index(drop=True)
    valid_df = folds_df[folds_df.kfold == fold].reset_index(drop=True)

    model = SEResnext50_32x4d(pretrained="imagenet")
    model.to(device)

    # ImageNet normalization statistics.
    norm_mean = (0.485, 0.456, 0.406)
    norm_std = (0.229, 0.224, 0.225)

    def _pipeline():
        # Normalization only; no geometric augmentation is applied.
        return albumentations.Compose(
            [
                albumentations.Normalize(
                    norm_mean, norm_std, max_pixel_value=255.0, always_apply=True
                )
            ]
        )

    def _image_paths(frame):
        # Map each image name in the dataframe to its on-disk png path.
        return [
            os.path.join(image_dir, name + ".png")
            for name in frame.image_name.values.tolist()
        ]

    train_targets = train_df.target.values
    valid_targets = valid_df.target.values

    train_loader = torch.utils.data.DataLoader(
        ClassificationDataset(
            image_paths=_image_paths(train_df),
            targets=train_targets,
            resize=None,
            augmentations=_pipeline(),
        ),
        batch_size=config.TRAIN_BATCH_SIZE,
        shuffle=True,
        num_workers=config.NUM_WORKERS,
    )
    valid_loader = torch.utils.data.DataLoader(
        ClassificationDataset(
            image_paths=_image_paths(valid_df),
            targets=valid_targets,
            resize=None,
            augmentations=_pipeline(),
        ),
        batch_size=config.EVAL_BATCH_SIZE,
        shuffle=False,
        num_workers=config.NUM_WORKERS,
    )

    optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
    # Step on validation AUC, so mode is "max".
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
        optimizer, patience=3, threshold=0.001, mode="max"
    )
    engine = Engine(model=model, optimizer=optimizer, device=device, scheduler=scheduler)
    stopper = EarlyStopping(patience=5, mode="max")

    for epoch in range(config.EPOCHS):
        engine.train(train_loader)
        predictions, _ = engine.evaluate(valid_loader)
        predictions = np.vstack(predictions).ravel()
        auc = metrics.roc_auc_score(valid_targets, predictions)
        print(f"Epoch = {epoch}, AUC = {auc}")
        scheduler.step(auc)
        stopper(auc, model, model_path=f"model_fold_{fold}.bin")
        if stopper.early_stop:
            print("Early stopping")
            break
def predict(fold):
    """Run inference on the test set with the saved weights for `fold`
    and return the raveled prediction array."""
    image_dir = config.DATA_DIR + "test"
    test_df = pd.read_csv(config.CSV_PATH + "test.csv")
    device = "cuda"
    weights_file = f"model_fold_{fold}.bin"

    # Same ImageNet normalization as training; nothing else.
    aug = albumentations.Compose(
        [
            albumentations.Normalize(
                (0.485, 0.456, 0.406),
                (0.229, 0.224, 0.225),
                max_pixel_value=255.0,
                always_apply=True,
            )
        ]
    )

    paths = [
        os.path.join(image_dir, name + ".png")
        for name in test_df.image_name.values.tolist()
    ]
    loader = torch.utils.data.DataLoader(
        ClassificationDataset(
            image_paths=paths,
            # Targets are unknown at prediction time; zeros are placeholders.
            targets=np.zeros(len(paths)),
            resize=None,
            augmentations=aug,
        ),
        batch_size=16,
        shuffle=False,
        num_workers=config.NUM_WORKERS,
    )

    model = SEResnext50_32x4d(pretrained=None)
    model.load_state_dict(torch.load(weights_file))
    model.to(device)

    # Okay to pass None to optimizer for prediction step
    engine = Engine(model=model, optimizer=None, device=device)
    return np.vstack(engine.predict(loader)).ravel()
# Entry point: trains only fold 0; the full cross-validation loop is kept
# commented out for reference.
if __name__ == '__main__':
    # num_folds = 10
    # for fold in range(num_folds):
    #     train(fold)
    train(0)
|
[
"anwesh.marwade@beyondsports.nl"
] |
anwesh.marwade@beyondsports.nl
|
1da97c6af4872548bebbafaac8f989b1d0ff3de2
|
4d6431b09b5db7c3cb93c047a8dc275d94e63bdb
|
/miscellaneous/tiny-curve.py
|
c6c3e6e2dd7e87809d5df233d4af05fd8c4d4aff
|
[] |
no_license
|
phm87/bitp0wn
|
8e0c8b3f0601fbaaa228a78f87546341989d7419
|
cb8d863ad0e4ae3a239593dcb9eecfbfe159127c
|
refs/heads/master
| 2021-10-27T19:40:50.666876
| 2019-04-19T06:43:00
| 2019-04-19T06:43:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,891
|
py
|
#!/usr/bin/env python
import random
""" modular inverse """
def inv(a, n):
    """Return the modular inverse of a modulo n (0 when a == 0)."""
    if a == 0:
        return 0
    # Extended Euclidean algorithm tracking only the coefficient of a.
    coeff, prev_coeff = 1, 0
    remainder, modulus = a % n, n
    while remainder > 1:
        quotient = modulus // remainder
        coeff, prev_coeff = prev_coeff - coeff * quotient, coeff
        remainder, modulus = modulus - remainder * quotient, remainder
    return coeff % n
""" Jacobian """
def to_jacobian(p):
    """Lift an affine point (x, y) to Jacobian coordinates (x, y, 1)."""
    return (p[0], p[1], 1)
def from_jacobian(p, P):
    """Project a Jacobian point back to affine coordinates modulo P."""
    z_inv = inv(p[2], P)
    x_affine = (p[0] * z_inv ** 2) % P
    y_affine = (p[1] * z_inv ** 3) % P
    return (x_affine, y_affine)
def jacobian_double(p, P):
    """Double a Jacobian-coordinate point on y^2 = x^3 + A*x + B (mod P)."""
    if not p[1]:
        # y == 0: the tangent is vertical, result is the point at infinity.
        return (0, 0, 0)
    y_sq = (p[1] * p[1]) % P
    s = (4 * p[0] * y_sq) % P
    m = (3 * p[0] * p[0] + A * p[2] ** 4) % P
    x_out = (m * m - 2 * s) % P
    y_out = (m * (s - x_out) - 8 * y_sq * y_sq) % P
    z_out = (2 * p[1] * p[2]) % P
    return (x_out, y_out, z_out)
def jacobian_add(p, q, P):
    """Add two Jacobian-coordinate points modulo P (y == 0 acts as identity)."""
    if not p[1]:
        return q
    if not q[1]:
        return p
    u1 = (p[0] * q[2] ** 2) % P
    u2 = (q[0] * p[2] ** 2) % P
    s1 = (p[1] * q[2] ** 3) % P
    s2 = (q[1] * p[2] ** 3) % P
    if u1 == u2:
        # Same x-coordinate: either opposite points (infinity) or a doubling.
        return (0, 0, 1) if s1 != s2 else jacobian_double(p, P)
    h = u2 - u1
    r = s2 - s1
    h_sq = (h * h) % P
    h_cu = (h * h_sq) % P
    u1h_sq = (u1 * h_sq) % P
    x_out = (r * r - h_cu - 2 * u1h_sq) % P
    y_out = (r * (u1h_sq - x_out) - s1 * h_cu) % P
    z_out = (h * p[2] * q[2]) % P
    return (x_out, y_out, z_out)
def jacobian_multiply(a, n, P):
    """Scalar-multiply Jacobian point a by n via recursive double-and-add."""
    if a[1] == 0 or n == 0:
        return (0, 0, 1)
    if n == 1:
        return a
    doubled = jacobian_double(jacobian_multiply(a, n // 2, P), P)
    # Odd scalars need one extra addition of the base point.
    return jacobian_add(doubled, a, P) if n % 2 else doubled
""" Elliptic curve functions """
def fast_add(a, b, P):
    """Add two affine points, using Jacobian arithmetic internally."""
    jacobian_sum = jacobian_add(to_jacobian(a), to_jacobian(b), P)
    return from_jacobian(jacobian_sum, P)
def fast_substract(p1, p2, P):
    """Subtract affine point p2 from p1, i.e. compute p1 + (-p2) modulo P.

    Rewritten without Python-2-only tuple parameter unpacking (removed by
    PEP 3113), so the function compiles on Python 3 while behaving
    identically on Python 2.
    """
    (x1, y1), (x2, y2) = p1, p2
    return fast_add((x1, y1), (x2, -y2), P)
def fast_multiply(a, n, P):
    """Multiply affine point a by scalar n, using Jacobian arithmetic internally."""
    product = jacobian_multiply(to_jacobian(a), n, P)
    return from_jacobian(product, P)
""" Legendre symbol
Compute Legendre symbol (a|p) using Euler's criterion.
p is a prime, a is relatively prime to p (if p divides a, then a|p = 0)
Returns 1 if a has a square root modulo p, -1 otherwise.
"""
def legendre_symbol(a, p):
    """Compute the Legendre symbol (a|p) using Euler's criterion.

    p must be an odd prime. Returns 1 if a is a quadratic residue mod p,
    -1 if it is a non-residue, and 0 if p divides a.

    Uses floor division (//) so the exponent stays an integer under
    Python 3's true division (PEP 238); behavior on Python 2 is unchanged.
    """
    ls = pow(a, (p - 1) // 2, p)
    return -1 if ls == p - 1 else ls
""" Tonelli-Shanks algorithm
Find a square root of n modulo p.
Solve for r in a congruence of the form r^2 = n (mod p), where p is a prime
"""
def tonnelli_shanks(a, p):
    """Find r with r^2 = a (mod p) for an odd prime p; a must be a residue.

    Ported to integer-only arithmetic: the original used Python-2-only
    `/` division (which yields floats on Python 3, breaking pow with a
    modulus) and `xrange`. `//` and `range` behave identically on both
    versions for these inputs.
    """
    # Partition p-1 to s * 2^e for an odd s (i.e. reduce all the powers of 2 from p-1)
    s = p - 1
    e = 0
    while s % 2 == 0:
        s //= 2
        e += 1
    # Find some 'n' with a legendre symbol n|p = -1.
    n = 2
    while legendre_symbol(n, p) != -1:
        n += 1
    # x is a guess of the square root that gets better with each iteration.
    # b is the "fudge factor" - by how much we're off with the guess.
    # The invariant x^2 = ab (mod p) is maintained throughout the loop.
    # g is used for successive powers of n to update both a and b
    # r is the exponent - decreases with each update
    x = pow(a, (s + 1) // 2, p)
    b = pow(a, s, p)
    g = pow(n, s, p)
    r = e
    while True:
        t = b
        m = 0
        for m in range(r):
            if t == 1:
                break
            t = pow(t, 2, p)
        if m == 0:
            return x
        gs = pow(g, 2 ** (r - m - 1), p)
        g = (gs * gs) % p
        x = (x * gs) % p
        b = (b * g) % p
        r = m
""" Newton's method to compute sqrt """
def isqrt(n):
    """Integer square root of n (floor) via Newton's iteration."""
    guess = n
    better = (guess + 1) // 2
    while better < guess:
        guess = better
        better = (guess + n // guess) // 2
    return guess
""" Deterministic variant of the Miller-Rabin primality test
See http://miller-rabin.appspot.com/ for more informations
"""
def _try_composite(a, d, n, s):
if pow(a, d, n) == 1:
return False
for i in range(s):
if pow(a, 2**i * d, n) == n-1:
return False
return True # n is definitely composite
def is_prime(n, _precision_for_huge_n=40):
    """Miller-Rabin primality test, deterministic below 3.4e14.

    Witness sets per range follow http://miller-rabin.appspot.com/; above
    the largest bound the test is probabilistic, using the first
    _precision_for_huge_n known primes as witnesses.
    """
    if n in _known_primes:
        return True
    if n in (0, 1) or any((n % p) == 0 for p in _known_primes):
        return False
    # Write n - 1 = d * 2^s with d odd.
    d, s = n - 1, 0
    while not d % 2:
        d, s = d >> 1, s + 1
    # Lone strong pseudoprime for the (2, 3, 5, 7) witness set in its range.
    if n == 3215031751:
        return False
    # (upper bound, witness bases) pairs proven exact per
    # http://primes.utm.edu/prove/prove2_3.html
    deterministic_witnesses = (
        (1373653, (2, 3)),
        (25326001, (2, 3, 5)),
        (118670087467, (2, 3, 5, 7)),
        (2152302898747, (2, 3, 5, 7, 11)),
        (3474749660383, (2, 3, 5, 7, 11, 13)),
        (341550071728321, (2, 3, 5, 7, 11, 13, 17)),
    )
    for bound, witnesses in deterministic_witnesses:
        if n < bound:
            return not any(_try_composite(a, d, n, s) for a in witnesses)
    # otherwise: probabilistic
    return not any(
        _try_composite(a, d, n, s) for a in _known_primes[:_precision_for_huge_n]
    )
# Seed the witness list with 2 and 3, then extend it with every odd prime
# below 1000 (is_prime only needs the small deterministic bounds for these).
_known_primes = [2, 3]
_known_primes += [x for x in range(5, 1000, 2) if is_prime(x)]
""" Prime generation """
def generate_prime(nbits=1024):
    """Return a random prime with exactly nbits bits (top bit set)."""
    while True:
        candidate = random.getrandbits(nbits)
        # Reject candidates below 2^(nbits-1) so the bit length is exact.
        if candidate >= 2 ** (nbits - 1) and is_prime(candidate, 20):
            return candidate
""" Curve generation
Generate a curve defined over a Weierstrass function
"""
# Generate P
P = generate_prime(32)
print("P = {0}".format(P))
# Generate A & B
while True:
A = random.randint(0, P)
B = random.randint(0, P)
if (4*A*A*A + 27*B*B) % P != 0:
break;
print("A = {0}".format(A))
print("B = {0}".format(B))
while True:
# Generate G, a random point on the curve
while True:
x = random.randint(0, P)
xcubedaxb = (x*x*x+A*x+B) % P
if legendre_symbol(xcubedaxb, P) == 1:
if P % 4 == 3:
y = int(pow(xcubedaxb, (P+1)//4, P))
else:
y = int(tonnelli_shanks(xcubedaxb, P))
assert (y**2 - xcubedaxb) % P == 0
G = (x, y)
break;
# Calculate order N from Hasse theorem and bsgs algorithm
sqrt_p = isqrt(P)
min_m, max_m = P + 1 - 2 * sqrt_p, P + 1 + 2 * sqrt_p
steps = isqrt(max_m - min_m)
m_candidates = []
O = (0, 0)
baby_steps = {}
for x in range(steps):
baby_steps[O] = x
O = fast_add(O, G, P)
O = fast_multiply(G, min_m, P)
O_ADDER = fast_multiply(G, steps, P)
for factor_giant in range(steps):
substract_res = fast_substract((0,0), O, P)
if substract_res in baby_steps:
factor_baby = baby_steps[substract_res]
m_candidates.append((factor_giant * steps) + factor_baby + min_m)
O = fast_add(O, O_ADDER, P)
if len(m_candidates) == 1:
print("G = {0}".format(G))
print("N = {0}".format(m_candidates[0]))
break;
|
[
"mvincenti@umi.us"
] |
mvincenti@umi.us
|
d0d587e1b4b9469f3ac1c1243ce87a1f15209c52
|
a63f1df096b93689ef96344ae75d6c2bb940bd78
|
/case/data_test.py
|
e3659eb0dd23c71882dc1f5f949fb3a80a76da2c
|
[] |
no_license
|
Remoterwls/-Selenium3-Python3-Web-
|
9c292f881da30b672785bc7161a2abde42233b40
|
52681b5a7b9d0cb62248f7885d003c125b3c9f02
|
refs/heads/master
| 2021-09-25T21:49:46.538240
| 2018-10-26T01:29:57
| 2018-10-26T01:29:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 393
|
py
|
#coding=utf-8
import ddt
import unittest
@ddt.ddt
class DataTest(unittest.TestCase):
    """Demonstrates ddt data-driven tests: test_add runs once per data row."""
    def setUp(self):
        # Runs before each generated test case.
        print("这是个setUp")
    def tearDown(self):
        # Runs after each generated test case.
        print("这是个tearDown")
    @ddt.data(
        [1,2],
        [3,4],
        [5,6]
    )
    @ddt.unpack
    def test_add(self,a,b):
        # @ddt.unpack splits each [a, b] pair above into the two parameters.
        print(a+b)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
|
[
"lirui2721@126.com"
] |
lirui2721@126.com
|
ea4ee0db551dfcfbf277aae55fb16ea53fd98a8d
|
b183c98f7749a015ca420940be85f8af6c004bb3
|
/easy/724.py
|
169275e02d0543c1aba327ee320b67f1951cae11
|
[
"Apache-2.0"
] |
permissive
|
oneTaken/leetcode
|
b8cfa7e0ff42de2eaef8b64cceef4f183006612e
|
f9357d839ac8fa6333b0d7eeb2028ba28a63764c
|
refs/heads/master
| 2020-03-12T10:08:12.200753
| 2018-05-05T05:12:24
| 2018-05-05T05:12:24
| 130,566,847
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,427
|
py
|
# solution1
# exceed time 732/741
class Solution:
    def pivotIndex(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Brute force: re-sum both sides at every candidate index.
        return next(
            (i for i in range(len(nums)) if sum(nums[:i]) == sum(nums[i + 1:])),
            -1,
        )
# solution2
# exceed time 734/741
class Solution2:
    def pivotIndex(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Parity pruning: left == right requires nums[i] to share parity with
        # the total sum, so mismatched-parity indices can be skipped.
        total_is_odd = bool(sum(nums) % 2)
        for idx, value in enumerate(nums):
            if bool(value % 2) ^ total_is_odd:
                continue
            if sum(nums[:idx]) == sum(nums[idx + 1:]):
                return idx
        return -1
# solution3
# exceed time 740/741
class Solution3:
    def pivotIndex(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Precompute the total once; still O(n) re-summing of the left side.
        total = sum(nums)
        total_is_odd = bool(total % 2)
        for idx, value in enumerate(nums):
            if total_is_odd ^ bool(value % 2):
                continue
            if 2 * sum(nums[:idx]) == total - value:
                return idx
        return -1
# solution4
class Solution4:
    def pivotIndex(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Single pass with running left/right sums instead of re-summing.
        left_sum, right_sum = 0, sum(nums)
        for idx, value in enumerate(nums):
            right_sum -= value
            if left_sum == right_sum:
                return idx
            left_sum += value
        return -1
|
[
"pengxiaoping1993@gmail.com"
] |
pengxiaoping1993@gmail.com
|
ab6c6b2f9bf5e3b2a925d98eb855913d4591f5d8
|
d200be757bb662cce3bb29a2302cf3356e9452c7
|
/BOJ/BOJ_1260.py
|
32f30c7b3d224a831be93216a59f33e32777a2ec
|
[] |
no_license
|
Jun0414/Algorithm
|
1140ce754a24e6116c40956f7a988fa30f618746
|
ff1782914c5a270a78c90a4a12e65daa22f8ea53
|
refs/heads/master
| 2023-07-08T23:37:41.002949
| 2021-08-14T14:33:58
| 2021-08-14T14:33:58
| 289,309,429
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,121
|
py
|
# 기초 그래프 기본 탐색
# 문제 1260번 DFS와 BFS
# Graph Searching(그래프 탐색), DFS(깊이 우선 탐색), BFS(너비 우선 탐색)
import sys
from collections import deque
r = sys.stdin.readline
def dfs(start):
    # Recursive depth-first traversal over the module-level `graph`,
    # printing nodes in visiting order and marking them in `visited`.
    print(start, end=' ')
    visited[start] = 1
    for neighbor in graph[start]:
        if not visited[neighbor]:
            dfs(neighbor)
def bfs(start):
    # Iterative breadth-first traversal using a FIFO queue; nodes may be
    # enqueued more than once, so re-check `visited` on dequeue.
    queue = deque([start])
    while queue:
        node = queue.popleft()
        if visited[node]:
            continue
        visited[node] = 1
        print(node, end=' ')
        queue.extend(i for i in graph[node] if not visited[i])
# Read n (vertex count), m (edge count), v (start vertex) and build an
# undirected adjacency list with 1-based vertex indices.
n, m, v = map(int, r().split())
graph = [[] for _ in range(n + 1)]
for _ in range(m):
    node1, node2 = map(int, r().split())
    graph[node1].append(node2)
    graph[node2].append(node1)
# Sort adjacency lists so neighbors are visited in ascending order.
for e in graph:
    e.sort()
# Run DFS then BFS from v, resetting the visited marks in between.
visited = [0] * (n + 1)
dfs(v)
print()
visited = [0] * (n + 1)
bfs(v)
# 입력 예시
# 4 5 1
# 1 2
# 1 3
# 1 4
# 2 4
# 3 4
# 5 5 3
# 5 4
# 5 2
# 1 2
# 3 4
# 3 1
# 1000 1 1000
# 999 1000
# 출력 예시
# 1 2 4 3
# 1 2 3 4
# 3 1 2 5 4
# 3 1 4 2 5
# 1000 999
# 1000 999
|
[
"gbwj123@naver.com"
] |
gbwj123@naver.com
|
a6d7cefff0cfa6857c2094941486b2270c910df7
|
14a5f9a8c95fea365f4af0a9f6e414c3347978d4
|
/python/fib_tail_rec.py
|
ba5619bf5c7b61cd796fa2d820ebf7252392b88a
|
[] |
no_license
|
carlosbazilio/lp
|
a61db3a37c408e9278b202d42bb1ed955aea5067
|
18720b45f581fdb13d4fb51c9f75f1bba4346217
|
refs/heads/master
| 2022-02-18T12:42:32.883848
| 2022-02-09T22:22:25
| 2022-02-09T22:22:25
| 24,647,667
| 6
| 10
| null | 2021-11-01T22:15:01
| 2014-09-30T17:18:25
|
C
|
UTF-8
|
Python
| false
| false
| 192
|
py
|
def fib(x):
    """Return the x-th Fibonacci number, 1-indexed: fib(1) == 0, fib(2) == 1.

    Uses an accumulator-style (tail-recursive) helper. Raises ValueError
    for x < 1, where the original recursion never reached its base case
    and crashed with RecursionError.
    """
    if x < 1:
        raise ValueError("x must be >= 1")

    def fib_aux(x, ant, curr):
        # ant holds fib(k) and curr holds fib(k+1); count x down to 1.
        if (x == 1):
            return ant
        return fib_aux(x - 1, curr, ant + curr)

    return fib_aux(x, 0, 1)
import sys
# CLI usage: python fib_tail_rec.py <n>
print (fib(int(sys.argv[1])))
|
[
"carlosbazilio@gmail.com"
] |
carlosbazilio@gmail.com
|
4feb01172bd737cfe58dbdd3b12b4c3d4d9ef263
|
f167dffa2f767a0419aa82bf434852069a8baeb8
|
/lib/youtube_dl/extractor/peertube.py
|
3af533925c6ad9af98b31a911048ba86fa9ca140
|
[
"MIT"
] |
permissive
|
firsttris/plugin.video.sendtokodi
|
d634490b55149adfdcb62c1af1eb77568b8da3f5
|
1095c58e2bc21de4ab6fcb67a70e4f0f04febbc3
|
refs/heads/master
| 2023-08-18T10:10:39.544848
| 2023-08-15T17:06:44
| 2023-08-15T17:06:44
| 84,665,460
| 111
| 31
|
MIT
| 2022-11-11T08:05:21
| 2017-03-11T16:53:06
|
Python
|
UTF-8
|
Python
| false
| false
| 28,591
|
py
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
int_or_none,
parse_resolution,
str_or_none,
try_get,
unified_timestamp,
url_or_none,
urljoin,
)
class PeerTubeIE(InfoExtractor):
_INSTANCES_RE = r'''(?:
# Taken from https://instances.joinpeertube.org/instances
peertube\.rainbowswingers\.net|
tube\.stanisic\.nl|
peer\.suiri\.us|
medias\.libox\.fr|
videomensoif\.ynh\.fr|
peertube\.travelpandas\.eu|
peertube\.rachetjay\.fr|
peertube\.montecsys\.fr|
tube\.eskuero\.me|
peer\.tube|
peertube\.umeahackerspace\.se|
tube\.nx-pod\.de|
video\.monsieurbidouille\.fr|
tube\.openalgeria\.org|
vid\.lelux\.fi|
video\.anormallostpod\.ovh|
tube\.crapaud-fou\.org|
peertube\.stemy\.me|
lostpod\.space|
exode\.me|
peertube\.snargol\.com|
vis\.ion\.ovh|
videosdulib\.re|
v\.mbius\.io|
videos\.judrey\.eu|
peertube\.osureplayviewer\.xyz|
peertube\.mathieufamily\.ovh|
www\.videos-libr\.es|
fightforinfo\.com|
peertube\.fediverse\.ru|
peertube\.oiseauroch\.fr|
video\.nesven\.eu|
v\.bearvideo\.win|
video\.qoto\.org|
justporn\.cc|
video\.vny\.fr|
peervideo\.club|
tube\.taker\.fr|
peertube\.chantierlibre\.org|
tube\.ipfixe\.info|
tube\.kicou\.info|
tube\.dodsorf\.as|
videobit\.cc|
video\.yukari\.moe|
videos\.elbinario\.net|
hkvideo\.live|
pt\.tux\.tf|
www\.hkvideo\.live|
FIGHTFORINFO\.com|
pt\.765racing\.com|
peertube\.gnumeria\.eu\.org|
nordenmedia\.com|
peertube\.co\.uk|
tube\.darfweb\.eu|
tube\.kalah-france\.org|
0ch\.in|
vod\.mochi\.academy|
film\.node9\.org|
peertube\.hatthieves\.es|
video\.fitchfamily\.org|
peertube\.ddns\.net|
video\.ifuncle\.kr|
video\.fdlibre\.eu|
tube\.22decembre\.eu|
peertube\.harmoniescreatives\.com|
tube\.fabrigli\.fr|
video\.thedwyers\.co|
video\.bruitbruit\.com|
peertube\.foxfam\.club|
peer\.philoxweb\.be|
videos\.bugs\.social|
peertube\.malbert\.xyz|
peertube\.bilange\.ca|
libretube\.net|
diytelevision\.com|
peertube\.fedilab\.app|
libre\.video|
video\.mstddntfdn\.online|
us\.tv|
peertube\.sl-network\.fr|
peertube\.dynlinux\.io|
peertube\.david\.durieux\.family|
peertube\.linuxrocks\.online|
peerwatch\.xyz|
v\.kretschmann\.social|
tube\.otter\.sh|
yt\.is\.nota\.live|
tube\.dragonpsi\.xyz|
peertube\.boneheadmedia\.com|
videos\.funkwhale\.audio|
watch\.44con\.com|
peertube\.gcaillaut\.fr|
peertube\.icu|
pony\.tube|
spacepub\.space|
tube\.stbr\.io|
v\.mom-gay\.faith|
tube\.port0\.xyz|
peertube\.simounet\.net|
play\.jergefelt\.se|
peertube\.zeteo\.me|
tube\.danq\.me|
peertube\.kerenon\.com|
tube\.fab-l3\.org|
tube\.calculate\.social|
peertube\.mckillop\.org|
tube\.netzspielplatz\.de|
vod\.ksite\.de|
peertube\.laas\.fr|
tube\.govital\.net|
peertube\.stephenson\.cc|
bistule\.nohost\.me|
peertube\.kajalinifi\.de|
video\.ploud\.jp|
video\.omniatv\.com|
peertube\.ffs2play\.fr|
peertube\.leboulaire\.ovh|
peertube\.tronic-studio\.com|
peertube\.public\.cat|
peertube\.metalbanana\.net|
video\.1000i100\.fr|
peertube\.alter-nativ-voll\.de|
tube\.pasa\.tf|
tube\.worldofhauru\.xyz|
pt\.kamp\.site|
peertube\.teleassist\.fr|
videos\.mleduc\.xyz|
conf\.tube|
media\.privacyinternational\.org|
pt\.forty-two\.nl|
video\.halle-leaks\.de|
video\.grosskopfgames\.de|
peertube\.schaeferit\.de|
peertube\.jackbot\.fr|
tube\.extinctionrebellion\.fr|
peertube\.f-si\.org|
video\.subak\.ovh|
videos\.koweb\.fr|
peertube\.zergy\.net|
peertube\.roflcopter\.fr|
peertube\.floss-marketing-school\.com|
vloggers\.social|
peertube\.iriseden\.eu|
videos\.ubuntu-paris\.org|
peertube\.mastodon\.host|
armstube\.com|
peertube\.s2s\.video|
peertube\.lol|
tube\.open-plug\.eu|
open\.tube|
peertube\.ch|
peertube\.normandie-libre\.fr|
peertube\.slat\.org|
video\.lacaveatonton\.ovh|
peertube\.uno|
peertube\.servebeer\.com|
peertube\.fedi\.quebec|
tube\.h3z\.jp|
tube\.plus200\.com|
peertube\.eric\.ovh|
tube\.metadocs\.cc|
tube\.unmondemeilleur\.eu|
gouttedeau\.space|
video\.antirep\.net|
nrop\.cant\.at|
tube\.ksl-bmx\.de|
tube\.plaf\.fr|
tube\.tchncs\.de|
video\.devinberg\.com|
hitchtube\.fr|
peertube\.kosebamse\.com|
yunopeertube\.myddns\.me|
peertube\.varney\.fr|
peertube\.anon-kenkai\.com|
tube\.maiti\.info|
tubee\.fr|
videos\.dinofly\.com|
toobnix\.org|
videotape\.me|
voca\.tube|
video\.heromuster\.com|
video\.lemediatv\.fr|
video\.up\.edu\.ph|
balafon\.video|
video\.ivel\.fr|
thickrips\.cloud|
pt\.laurentkruger\.fr|
video\.monarch-pass\.net|
peertube\.artica\.center|
video\.alternanet\.fr|
indymotion\.fr|
fanvid\.stopthatimp\.net|
video\.farci\.org|
v\.lesterpig\.com|
video\.okaris\.de|
tube\.pawelko\.net|
peertube\.mablr\.org|
tube\.fede\.re|
pytu\.be|
evertron\.tv|
devtube\.dev-wiki\.de|
raptube\.antipub\.org|
video\.selea\.se|
peertube\.mygaia\.org|
video\.oh14\.de|
peertube\.livingutopia\.org|
peertube\.the-penguin\.de|
tube\.thechangebook\.org|
tube\.anjara\.eu|
pt\.pube\.tk|
video\.samedi\.pm|
mplayer\.demouliere\.eu|
widemus\.de|
peertube\.me|
peertube\.zapashcanon\.fr|
video\.latavernedejohnjohn\.fr|
peertube\.pcservice46\.fr|
peertube\.mazzonetto\.eu|
video\.irem\.univ-paris-diderot\.fr|
video\.livecchi\.cloud|
alttube\.fr|
video\.coop\.tools|
video\.cabane-libre\.org|
peertube\.openstreetmap\.fr|
videos\.alolise\.org|
irrsinn\.video|
video\.antopie\.org|
scitech\.video|
tube2\.nemsia\.org|
video\.amic37\.fr|
peertube\.freeforge\.eu|
video\.arbitrarion\.com|
video\.datsemultimedia\.com|
stoptrackingus\.tv|
peertube\.ricostrongxxx\.com|
docker\.videos\.lecygnenoir\.info|
peertube\.togart\.de|
tube\.postblue\.info|
videos\.domainepublic\.net|
peertube\.cyber-tribal\.com|
video\.gresille\.org|
peertube\.dsmouse\.net|
cinema\.yunohost\.support|
tube\.theocevaer\.fr|
repro\.video|
tube\.4aem\.com|
quaziinc\.com|
peertube\.metawurst\.space|
videos\.wakapo\.com|
video\.ploud\.fr|
video\.freeradical\.zone|
tube\.valinor\.fr|
refuznik\.video|
pt\.kircheneuenburg\.de|
peertube\.asrun\.eu|
peertube\.lagob\.fr|
videos\.side-ways\.net|
91video\.online|
video\.valme\.io|
video\.taboulisme\.com|
videos-libr\.es|
tv\.mooh\.fr|
nuage\.acostey\.fr|
video\.monsieur-a\.fr|
peertube\.librelois\.fr|
videos\.pair2jeux\.tube|
videos\.pueseso\.club|
peer\.mathdacloud\.ovh|
media\.assassinate-you\.net|
vidcommons\.org|
ptube\.rousset\.nom\.fr|
tube\.cyano\.at|
videos\.squat\.net|
video\.iphodase\.fr|
peertube\.makotoworkshop\.org|
peertube\.serveur\.slv-valbonne\.fr|
vault\.mle\.party|
hostyour\.tv|
videos\.hack2g2\.fr|
libre\.tube|
pire\.artisanlogiciel\.net|
videos\.numerique-en-commun\.fr|
video\.netsyms\.com|
video\.die-partei\.social|
video\.writeas\.org|
peertube\.swarm\.solvingmaz\.es|
tube\.pericoloso\.ovh|
watching\.cypherpunk\.observer|
videos\.adhocmusic\.com|
tube\.rfc1149\.net|
peertube\.librelabucm\.org|
videos\.numericoop\.fr|
peertube\.koehn\.com|
peertube\.anarchmusicall\.net|
tube\.kampftoast\.de|
vid\.y-y\.li|
peertube\.xtenz\.xyz|
diode\.zone|
tube\.egf\.mn|
peertube\.nomagic\.uk|
visionon\.tv|
videos\.koumoul\.com|
video\.rastapuls\.com|
video\.mantlepro\.com|
video\.deadsuperhero\.com|
peertube\.musicstudio\.pro|
peertube\.we-keys\.fr|
artitube\.artifaille\.fr|
peertube\.ethernia\.net|
tube\.midov\.pl|
peertube\.fr|
watch\.snoot\.tube|
peertube\.donnadieu\.fr|
argos\.aquilenet\.fr|
tube\.nemsia\.org|
tube\.bruniau\.net|
videos\.darckoune\.moe|
tube\.traydent\.info|
dev\.videos\.lecygnenoir\.info|
peertube\.nayya\.org|
peertube\.live|
peertube\.mofgao\.space|
video\.lequerrec\.eu|
peertube\.amicale\.net|
aperi\.tube|
tube\.ac-lyon\.fr|
video\.lw1\.at|
www\.yiny\.org|
videos\.pofilo\.fr|
tube\.lou\.lt|
choob\.h\.etbus\.ch|
tube\.hoga\.fr|
peertube\.heberge\.fr|
video\.obermui\.de|
videos\.cloudfrancois\.fr|
betamax\.video|
video\.typica\.us|
tube\.piweb\.be|
video\.blender\.org|
peertube\.cat|
tube\.kdy\.ch|
pe\.ertu\.be|
peertube\.social|
videos\.lescommuns\.org|
tv\.datamol\.org|
videonaute\.fr|
dialup\.express|
peertube\.nogafa\.org|
megatube\.lilomoino\.fr|
peertube\.tamanoir\.foucry\.net|
peertube\.devosi\.org|
peertube\.1312\.media|
tube\.bootlicker\.party|
skeptikon\.fr|
video\.blueline\.mg|
tube\.homecomputing\.fr|
tube\.ouahpiti\.info|
video\.tedomum\.net|
video\.g3l\.org|
fontube\.fr|
peertube\.gaialabs\.ch|
tube\.kher\.nl|
peertube\.qtg\.fr|
video\.migennes\.net|
tube\.p2p\.legal|
troll\.tv|
videos\.iut-orsay\.fr|
peertube\.solidev\.net|
videos\.cemea\.org|
video\.passageenseine\.fr|
videos\.festivalparminous\.org|
peertube\.touhoppai\.moe|
sikke\.fi|
peer\.hostux\.social|
share\.tube|
peertube\.walkingmountains\.fr|
videos\.benpro\.fr|
peertube\.parleur\.net|
peertube\.heraut\.eu|
tube\.aquilenet\.fr|
peertube\.gegeweb\.eu|
framatube\.org|
thinkerview\.video|
tube\.conferences-gesticulees\.net|
peertube\.datagueule\.tv|
video\.lqdn\.fr|
tube\.mochi\.academy|
media\.zat\.im|
video\.colibris-outilslibres\.org|
tube\.svnet\.fr|
peertube\.video|
peertube3\.cpy\.re|
peertube2\.cpy\.re|
videos\.tcit\.fr|
peertube\.cpy\.re|
canard\.tube
)'''
_UUID_RE = r'[\da-fA-F]{8}-[\da-fA-F]{4}-[\da-fA-F]{4}-[\da-fA-F]{4}-[\da-fA-F]{12}'
_API_BASE = 'https://%s/api/v1/videos/%s/%s'
_VALID_URL = r'''(?x)
(?:
peertube:(?P<host>[^:]+):|
https?://(?P<host_2>%s)/(?:videos/(?:watch|embed)|api/v\d/videos)/
)
(?P<id>%s)
''' % (_INSTANCES_RE, _UUID_RE)
_TESTS = [{
'url': 'https://framatube.org/videos/watch/9c9de5e8-0a1e-484a-b099-e80766180a6d',
'md5': '9bed8c0137913e17b86334e5885aacff',
'info_dict': {
'id': '9c9de5e8-0a1e-484a-b099-e80766180a6d',
'ext': 'mp4',
'title': 'What is PeerTube?',
'description': 'md5:3fefb8dde2b189186ce0719fda6f7b10',
'thumbnail': r're:https?://.*\.(?:jpg|png)',
'timestamp': 1538391166,
'upload_date': '20181001',
'uploader': 'Framasoft',
'uploader_id': '3',
'uploader_url': 'https://framatube.org/accounts/framasoft',
'channel': 'Les vidéos de Framasoft',
'channel_id': '2',
'channel_url': 'https://framatube.org/video-channels/bf54d359-cfad-4935-9d45-9d6be93f63e8',
'language': 'en',
'license': 'Attribution - Share Alike',
'duration': 113,
'view_count': int,
'like_count': int,
'dislike_count': int,
'tags': ['framasoft', 'peertube'],
'categories': ['Science & Technology'],
}
}, {
# Issue #26002
'url': 'peertube:spacepub.space:d8943b2d-8280-497b-85ec-bc282ec2afdc',
'info_dict': {
'id': 'd8943b2d-8280-497b-85ec-bc282ec2afdc',
'ext': 'mp4',
'title': 'Dot matrix printer shell demo',
'uploader_id': '3',
'timestamp': 1587401293,
'upload_date': '20200420',
'uploader': 'Drew DeVault',
}
}, {
'url': 'https://peertube.tamanoir.foucry.net/videos/watch/0b04f13d-1e18-4f1d-814e-4979aa7c9c44',
'only_matching': True,
}, {
# nsfw
'url': 'https://tube.22decembre.eu/videos/watch/9bb88cd3-9959-46d9-9ab9-33d2bb704c39',
'only_matching': True,
}, {
'url': 'https://tube.22decembre.eu/videos/embed/fed67262-6edb-4d1c-833b-daa9085c71d7',
'only_matching': True,
}, {
'url': 'https://tube.openalgeria.org/api/v1/videos/c1875674-97d0-4c94-a058-3f7e64c962e8',
'only_matching': True,
}, {
'url': 'peertube:video.blender.org:b37a5b9f-e6b5-415c-b700-04a5cd6ec205',
'only_matching': True,
}]
@staticmethod
def _extract_peertube_url(webpage, source_url):
mobj = re.match(
r'https?://(?P<host>[^/]+)/videos/(?:watch|embed)/(?P<id>%s)'
% PeerTubeIE._UUID_RE, source_url)
if mobj and any(p in webpage for p in (
'<title>PeerTube<',
'There will be other non JS-based clients to access PeerTube',
'>We are sorry but it seems that PeerTube is not compatible with your web browser.<')):
return 'peertube:%s:%s' % mobj.group('host', 'id')
@staticmethod
def _extract_urls(webpage, source_url):
entries = re.findall(
r'''(?x)<iframe[^>]+\bsrc=["\'](?P<url>(?:https?:)?//%s/videos/embed/%s)'''
% (PeerTubeIE._INSTANCES_RE, PeerTubeIE._UUID_RE), webpage)
if not entries:
peertube_url = PeerTubeIE._extract_peertube_url(webpage, source_url)
if peertube_url:
entries = [peertube_url]
return entries
    def _call_api(self, host, video_id, path, note=None, errnote=None, fatal=True):
        # Fetch and decode one v1 API endpoint for this video; `path` is the
        # sub-resource ('' for the video itself, 'captions', 'description', ...).
        return self._download_json(
            self._API_BASE % (host, video_id, path), video_id,
            note=note, errnote=errnote, fatal=fatal)
def _get_subtitles(self, host, video_id):
captions = self._call_api(
host, video_id, 'captions', note='Downloading captions JSON',
fatal=False)
if not isinstance(captions, dict):
return
data = captions.get('data')
if not isinstance(data, list):
return
subtitles = {}
for e in data:
language_id = try_get(e, lambda x: x['language']['id'], compat_str)
caption_url = urljoin('https://%s' % host, e.get('captionPath'))
if not caption_url:
continue
subtitles.setdefault(language_id or 'en', []).append({
'url': caption_url,
})
return subtitles
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
host = mobj.group('host') or mobj.group('host_2')
video_id = mobj.group('id')
video = self._call_api(
host, video_id, '', note='Downloading video JSON')
title = video['name']
formats = []
files = video.get('files') or []
for playlist in (video.get('streamingPlaylists') or []):
if not isinstance(playlist, dict):
continue
playlist_files = playlist.get('files')
if not (playlist_files and isinstance(playlist_files, list)):
continue
files.extend(playlist_files)
for file_ in files:
if not isinstance(file_, dict):
continue
file_url = url_or_none(file_.get('fileUrl'))
if not file_url:
continue
file_size = int_or_none(file_.get('size'))
format_id = try_get(
file_, lambda x: x['resolution']['label'], compat_str)
f = parse_resolution(format_id)
f.update({
'url': file_url,
'format_id': format_id,
'filesize': file_size,
})
if format_id == '0p':
f['vcodec'] = 'none'
else:
f['fps'] = int_or_none(file_.get('fps'))
formats.append(f)
self._sort_formats(formats)
description = video.get('description')
if len(description) >= 250:
# description is shortened
full_description = self._call_api(
host, video_id, 'description', note='Downloading description JSON',
fatal=False)
if isinstance(full_description, dict):
description = str_or_none(full_description.get('description')) or description
subtitles = self.extract_subtitles(host, video_id)
def data(section, field, type_):
return try_get(video, lambda x: x[section][field], type_)
def account_data(field, type_):
return data('account', field, type_)
def channel_data(field, type_):
return data('channel', field, type_)
category = data('category', 'label', compat_str)
categories = [category] if category else None
nsfw = video.get('nsfw')
if nsfw is bool:
age_limit = 18 if nsfw else 0
else:
age_limit = None
webpage_url = 'https://%s/videos/watch/%s' % (host, video_id)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': urljoin(webpage_url, video.get('thumbnailPath')),
'timestamp': unified_timestamp(video.get('publishedAt')),
'uploader': account_data('displayName', compat_str),
'uploader_id': str_or_none(account_data('id', int)),
'uploader_url': url_or_none(account_data('url', compat_str)),
'channel': channel_data('displayName', compat_str),
'channel_id': str_or_none(channel_data('id', int)),
'channel_url': url_or_none(channel_data('url', compat_str)),
'language': data('language', 'id', compat_str),
'license': data('licence', 'label', compat_str),
'duration': int_or_none(video.get('duration')),
'view_count': int_or_none(video.get('views')),
'like_count': int_or_none(video.get('likes')),
'dislike_count': int_or_none(video.get('dislikes')),
'age_limit': age_limit,
'tags': try_get(video, lambda x: x['tags'], list),
'categories': categories,
'formats': formats,
'subtitles': subtitles,
'webpage_url': webpage_url,
}
|
[
"noreply@github.com"
] |
firsttris.noreply@github.com
|
da569cc273c2ae1901e1ae047d0991987d2a84aa
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/coverage-big-3752.py
|
4a0c5e323805e1b8e94e96f57436f92568c87018
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,343
|
py
|
count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo($Exp) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
94652b2790c9e1bf47b28be0a5263c23b1653f41
|
4a2073876a2ea3cf6d12232f31d23ffbd4bbcd1d
|
/Animais/animais.py
|
746b187ea6a96d81bef57f8fd263c0ca48ea7b6f
|
[] |
no_license
|
programadorpython/PPAulasAoVivo3
|
d493d466b74263f394437236afcc0c7fdc5c97c6
|
b5c48590442078cba60cefa44e38a5b05d7bb648
|
refs/heads/master
| 2022-11-19T06:30:54.284587
| 2020-07-22T00:53:07
| 2020-07-22T00:53:07
| 281,537,631
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,135
|
py
|
class Animal:
    """Model a pet-shop animal and its basic attributes."""

    def __init__(self, nome, idade, sexo, cor, tipo, classe):
        """Store the animal's data; nome, tipo and classe are title-cased.

        :param nome: string
        :param idade: int
        :param sexo: string
        :param cor: string
        :param tipo: string
        :param classe: string
        """
        self.nome = nome.title()
        self.idade = idade
        self.sexo = sexo
        self.cor = cor
        self.tipo = tipo.title()
        self.classe = classe.title()

    def animal_detalhes(self):
        """Print a formatted summary of the animal's attributes."""
        detalhes = (
            f"O animal do tipo {self.tipo} é da classe {self.classe}: "
            f"\n\tnome: {self.nome} "
            f"\n\tidade: {self.idade} "
            f"\n\tsexo: {self.sexo} "
            f"\n\tcor: {self.cor}"
        )
        print(f"\n{detalhes}")

    def mensagem(self):
        """Print a welcome message mentioning the animal's name."""
        saudacao = f"Olá {self.nome}, seja bem vindo ao nosso Pet Shop!"
        print(f"\n{saudacao}")
|
[
"jefferson2004@gmail.com"
] |
jefferson2004@gmail.com
|
c62b581e8e42c66c37405503791022fec73e8e93
|
7fe56d3a94bcd1f44b9a949030982f6eeae5b029
|
/tests/__init__.py
|
1cb9b7e077b002feb8d39e3ca7b977d48303dacf
|
[
"MIT"
] |
permissive
|
rcourivaud/MagicLogger
|
d128e78efea21243e18b96759c4b3a09022e68ff
|
135f9b314428717909f4cda17298db7100b1e7d3
|
refs/heads/master
| 2021-07-05T16:53:06.522476
| 2018-11-19T16:32:51
| 2018-11-19T16:32:51
| 111,118,407
| 0
| 2
|
MIT
| 2018-06-18T09:09:20
| 2017-11-17T15:23:06
|
Python
|
UTF-8
|
Python
| false
| false
| 70
|
py
|
# -*- coding: utf-8 -*-
"""Unit test package for logstash_logger."""
|
[
"r.courivaud@gmail.com"
] |
r.courivaud@gmail.com
|
bf76b1dfcf4b11203e9950456b21b7a94dcb8d0d
|
17058fc47997f47f19337a2244aa69207b86273d
|
/misc/create_labels.py
|
272f515a01f2d2191ef4ce529bc7c19da47ab053
|
[
"MIT"
] |
permissive
|
MISTLab/of-obstacledetection
|
da90cbb42527011d662925f5289ec5adbed2a9b3
|
a2281a37b4cc6482eb87546fa414fdaa38ec04e5
|
refs/heads/master
| 2023-02-22T15:23:12.587480
| 2020-06-06T13:28:25
| 2020-06-06T13:28:25
| 223,396,484
| 13
| 2
|
MIT
| 2023-02-02T06:00:26
| 2019-11-22T12:15:52
|
Python
|
UTF-8
|
Python
| false
| false
| 2,352
|
py
|
#!/usr/bin/env python3
"""
create_labels.py
Script for creating labels for a new collision dataset.
Written by Moritz Sperling
Licensed under the MIT License (see LICENSE for details)
"""
import cv2
import glob
import os, sys
import absl.flags as gflags
localpath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, localpath + '/../workflow/util/')
from common_flags import FLAGS
from misc_utils import get_experiment_folders
def _main():
    """Interactively label each experiment's images as collision / no-collision.

    For every experiment subfolder under FLAGS.test_dir, shows each jpg in
    turn; pressing SPACE toggles the 'collision' state, and the per-frame
    labels (1 = collision, 0 = clear) are written to labels.txt.
    """
    # init variables
    filename = "labels.txt"
    folder = FLAGS.test_dir
    # iterate subdirectories
    dirs = get_experiment_folders(folder)
    for subdir in dirs:
        # ignore hidden folders
        if subdir[0] != '.':
            # get filenames
            path = os.path.join(folder, subdir, 'images/*.jpg')
            imgs = sorted(glob.glob(path))
            labels = []
            collision = False
            # iterate through imgs
            for i, fname in enumerate(imgs):
                # load image and prep for dronet
                img = cv2.imread(fname, cv2.IMREAD_COLOR)
                # Overlay the frame index so the labeller can track position.
                cv2.putText(img, "[#{:03d}]".format(i), (10, 30),
                            cv2.FONT_HERSHEY_SIMPLEX,
                            1.0, (0, 0, 255), 2, lineType=30)
                # show img and label as collision when space is pressed
                cv2.imshow('frame',img)
                # waitKey(100) shows each frame for ~100 ms; SPACE toggles state.
                if cv2.waitKey(100) & 0xFF == ord(' '):
                    # de/activate collision
                    collision = not collision
                # append current label
                if collision:
                    labels.append(1)
                    print('collision')
                else:
                    labels.append(0)
                    print('path clear')
            cv2.destroyAllWindows()
            # produce labels file: only sequences with more than 2 frames count
            if len(labels) > 2:
                outfile = os.path.join(folder, subdir, filename)
                with open(outfile, 'w') as f:
                    for item in labels:
                        f.write("%s\n" % str(item))
                # NOTE(review): redundant — the with-block already closed f.
                f.close()
def main(argv):
    """Entry point: parse command-line flags, then run the labelling tool."""
    try:
        FLAGS(argv) # parse flags
    except gflags.Error:
        # Bad flags: show usage and abort with a non-zero exit status.
        print ('Usage: %s ARGS\\n%s' % (sys.argv[0], FLAGS))
        sys.exit(1)
    else:
        _main()
if __name__ == "__main__":
main(sys.argv)
|
[
"mail@nfinitedesign.de"
] |
mail@nfinitedesign.de
|
9f936947d909992c5e8fdc0c4e7a0cee5501ae5a
|
7551f4644e2145637e9549caa2dd7dd9eaf2bc87
|
/mCRPCcode/AlternativeModel_Sto_forced_LongRun/ADTplusAbi_Adaptive0_forced.py
|
bc6ed1d509bfac536445c5ac8d7686a68c9ad9c4
|
[] |
no_license
|
mariandm/FoundationsQBios
|
f3af94724b7b2c7e37aebd6454fde7632c29549c
|
734d16157cafe28e795c2012b9275b881d60af92
|
refs/heads/master
| 2020-07-21T05:08:47.066509
| 2019-12-11T11:30:16
| 2019-12-11T11:30:16
| 206,760,121
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,447
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 27 16:15:13 2019
@author: mariandm
"""
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import time
import copy
start_time = time.time()
ESS = 1e4*np.array([[1.1111,0.5555,0.0000,3.3333],[1.1111,0.5556,0.0000,3.3333],[1.1157,0.4545,0.1240,3.3884],[1.1842,0.5263,0.0000,3.4211],[1.1842,0.5263,0.0000,3.4211],[1.1111,0.5556,0.0000,3.3333],[1.1111,0.5556,0.0000,3.3333],[1.1842,0.5263,0.0000,3.4211],[1.1842,0.5263,0.0000,3.4211],[1.1842,0.5263,0.0000,3.4211],[1.2500,0.5000,0.0000,3.5000],[1.2500,0.5000,0.0000,3.5000],[1.2500,0.5000,0.0000,3.5000],[1.2500,0.5000,0.0000,3.5000],[1.2500,0.5000,0.0000,3.5000],[1.2500,0.5000,0.0000,3.5000],[1.3750,0.3125,0.0000,3.3750],[1.3750,0.3125,0.0000,3.3750],[1.3750,0.3125,0.0000,3.3750],[1.3750,0.3125,0.0000,3.3750],[1.3750,0.3125,0.0000,3.3750],[1.3750,0.3125,0.0000,3.3750]])
matrixCoefficients = np.array([[0.7,0.9,0.4,0.6,0.5,0.8],[0.7,0.8,0.4,0.6,0.5,0.9],[0.6,0.9,0.4,0.8,0.5,0.7],[0.6,0.9,0.4,0.7,0.5,0.8],[0.6,0.8,0.4,0.7,0.5,0.9],[0.7,0.9,0.4,0.5,0.6,0.8],[0.7,0.8,0.4,0.5,0.6,0.9],[0.6,0.9,0.4,0.5,0.7,0.8],[0.6,0.8,0.4,0.5,0.7,0.9],[0.6,0.7,0.4,0.5,0.8,0.9],[0.5,0.9,0.4,0.8,0.6,0.7],[0.5,0.9,0.4,0.7,0.6,0.8],[0.5,0.8,0.4,0.7,0.6,0.9],[0.5,0.9,0.4,0.6,0.7,0.8],[0.5,0.8,0.4,0.6,0.7,0.9],[0.5,0.7,0.4,0.6,0.8,0.9],[0.4,0.9,0.5,0.8,0.6,0.7],[0.4,0.9,0.5,0.7,0.6,0.8],[0.4,0.8,0.5,0.7,0.6,0.9],[0.4,0.9,0.5,0.6,0.7,0.8],[0.4,0.8,0.5,0.6,0.7,0.9],[0.4,0.7,0.5,0.6,0.8,0.9]])
# Set matrixIndex = 7 for Representative patient #1
# Set matrixInded = 5 for Representative patient #2
# Treatment schedules to simulate and their sub-variants (indices).
TreatmentsVector=["Adaptive"]
subtreatmentsVector=[[0]]
# Growth rates for the three cell types, scaled down by `scale`.
scale = .01
r = np.array([0.27726, 0.34657, 0.66542])
r = r*scale
# PSA dynamics
sigmaPSA = 0.5;
# Set simulation time.
maxSimulationTime = 10000
replicateNumber = 50
# Nested result dict:
# dicti[treatment][subtreatment][patient 0-21][dominant cell type 0-2]
#   -> {"count", "Time", "PSA", "densities"} accumulators (averaged later).
dicti=dict.fromkeys(TreatmentsVector)
for i in range(len(TreatmentsVector)):
    dicti[TreatmentsVector[i]]=dict.fromkeys(subtreatmentsVector[i])
    for j in range(len(subtreatmentsVector[i])):
        dicti[TreatmentsVector[i]][subtreatmentsVector[i][j]]=dict.fromkeys(range(22))
        for k in range(22):
            dicti[TreatmentsVector[i]][subtreatmentsVector[i][j]][k]=dict.fromkeys(range(3))
            for l in range(3):
                dicti[TreatmentsVector[i]][subtreatmentsVector[i][j]][k][l]=dict.fromkeys(["count","Time","PSA"],0)
                dicti[TreatmentsVector[i]][subtreatmentsVector[i][j]][k][l]["densities"]=np.array([0.0,0.0,0.0])
#############################we start all the fors here########################
# Sweep every treatment schedule, sub-schedule and patient parameter set,
# running `replicateNumber` stochastic replicates of each combination.
for curr_treatmentType in range(len(TreatmentsVector)):
    treatmentType = TreatmentsVector[curr_treatmentType]
    for curr_subtreatmentType in subtreatmentsVector[curr_treatmentType]:
        subtreatmentType = curr_subtreatmentType
        for curr_matrixIndex in range(len(ESS)):
            matrixIndex = curr_matrixIndex
            # Competition coefficients for this patient's interaction matrix.
            alphas = matrixCoefficients[matrixIndex,:]
            #Initial tumor densities set at 40% of ESS values
            y0 = ESS[matrixIndex, :]* 0.4
            y0 = np.ceil(y0)
            # Keep at least one T- cell so its population can never go extinct.
            if (y0[2]<1):y0[2]=1
            #Give abiraterone at what % of ESS PSA?
            maxPSAPercent = 0.8
            PSA_zenith = ESS[matrixIndex,3] * maxPSAPercent
            PSA_nadir = PSA_zenith * maxPSAPercent/2
            for curr_replicate in range(1,replicateNumber+1):
                # Treatment on/off histories, one entry per simulation step.
                AbiOnOffFlag = [0]
                ADTOnOffFlag = [0]
                # Set initial state.
                y = copy.deepcopy(y0)
                # Create and initialize matrix for ODE solution
                allSolution = []
                allSolution.append(list(y))
                time=[0]
                firstTreatment=1
                count=True
                warning=False
while time[-1] < maxSimulationTime:
if treatmentType=="MTD":
if firstTreatment==1:
if y[3]<PSA_zenith:
AbiOnOffFlag.append(0)
ADTOnOffFlag.append(0)
else:
firstTreatment=0
else:
if subtreatmentType==0:
AbiOnOffFlag.append(0)
ADTOnOffFlag.append(1)
elif subtreatmentType==1:
AbiOnOffFlag.append(1)
ADTOnOffFlag.append(1)
elif subtreatmentType==2:
if y[3] < PSA_zenith:
warning=True
if(y[3]>PSA_zenith and warning):
count=False
if(count):
AbiOnOffFlag.append(0)
ADTOnOffFlag.append(1)
else:
AbiOnOffFlag.append(1)
ADTOnOffFlag.append(1)
if treatmentType=="NoTreatment":
AbiOnOffFlag.append(0)
ADTOnOffFlag.append(0)
# Adaptive Abi is built during the simulation. Turns Abi on once the
# PSA zenith value is reached and turns it off once the nadir is reached.
if treatmentType=="Metronomic":
if firstTreatment==1:
if y[3]<PSA_zenith:
AbiOnOffFlag.append(0)
ADTOnOffFlag.append(0)
else:
firstTreatment=0
firstTreatmentTime=time[-1]
tot=firstTreatmentTime+2000
abo=[]
adto=[]
cutTimes=[]
if subtreatmentType==0:#ADT+Abi, none
while tot < maxSimulationTime:
#ADT+abi
tot+=200
abo.append(1)
adto.append(1)
cutTimes.append(tot)
#none
tot+=1000
cutTimes.append(tot)
abo.append(0)
adto.append(0)
cutTimes=[firstTreatmentTime, firstTreatmentTime+800, firstTreatmentTime+2000]+cutTimes
abo=[0,0,0]+abo
adto=[0,1,0]+adto
elif subtreatmentType==1: #ADT+Abi, none, ADT, none
while tot < maxSimulationTime:
tot+=200
abo.append(1)
adto.append(1)
cutTimes.append(tot)
tot+=1000
cutTimes.append(tot)
abo.append(0)
adto.append(0)
tot+=200
cutTimes.append(tot)
abo.append(0)
adto.append(1)
tot+=1000
cutTimes.append(tot)
abo.append(0)
adto.append(0)
cutTimes=[firstTreatmentTime, firstTreatmentTime+800, firstTreatmentTime+2000]+cutTimes
abo=[0,0,0]+abo
adto=[0,1,0]+adto
elif subtreatmentType==2: #ADT+abi, ADT, none
while tot < maxSimulationTime:
tot+=200
abo.append(1)
adto.append(1)
cutTimes.append(tot)
tot+=200
cutTimes.append(tot)
abo.append(0)
adto.append(1)
tot+=1000
cutTimes.append(tot)
abo.append(0)
adto.append(0)
cutTimes=[firstTreatmentTime, firstTreatmentTime+800, firstTreatmentTime+2000]+cutTimes
abo=[0,0,0]+abo
adto=[0,1,0]+adto
elif subtreatmentType==3: #ADT, ADT+abi, none
while tot < maxSimulationTime:
tot+=200
abo.append(0)
adto.append(1)
cutTimes.append(tot)
tot+=200
cutTimes.append(tot)
abo.append(1)
adto.append(1)
tot+=1000
cutTimes.append(tot)
abo.append(0)
adto.append(0)
cutTimes=[firstTreatmentTime, firstTreatmentTime+800, firstTreatmentTime+2000]+cutTimes
abo=[0,0,0]+abo
adto=[0,1,0]+adto
else:
AbiOnOffFlag.append(abo[np.where(time[-1]<np.array(cutTimes))[0][0]])
ADTOnOffFlag.append(adto[np.where(time[-1]<np.array(cutTimes))[0][0]])
                    # Adaptive therapy: toggle treatment on at the PSA zenith
                    # threshold and off again once PSA falls below the nadir.
                    if treatmentType=='Adaptive':
                        if subtreatmentType==0:
                            if y[3] > PSA_zenith:
                                AbiOnOffFlag.append(1)
                                ADTOnOffFlag.append(1)
                            elif (y[3] < PSA_nadir):
                                AbiOnOffFlag.append(0)
                                ADTOnOffFlag.append(0)
                            else:
                                # Between thresholds: hold the previous state.
                                # NOTE(review): ADT list receives the *Abi*
                                # flag here — possible copy-paste bug; confirm.
                                AbiOnOffFlag.append(AbiOnOffFlag[-1])
                                ADTOnOffFlag.append(AbiOnOffFlag[-1])
                        elif subtreatmentType==1:
                            if y[3] > PSA_zenith:
                                # `count` alternates full ADT+Abi with ADT-only cycles.
                                if(count):
                                    AbiOnOffFlag.append(1)
                                    ADTOnOffFlag.append(1)
                                else:
                                    AbiOnOffFlag.append(0)
                                    ADTOnOffFlag.append(1)
                            elif (y[3] < PSA_nadir):
                                AbiOnOffFlag.append(0)
                                ADTOnOffFlag.append(0)
                            else:
                                # NOTE(review): same Abi-flag-into-ADT-list
                                # pattern as above — confirm intent.
                                AbiOnOffFlag.append(AbiOnOffFlag[-1])
                                ADTOnOffFlag.append(AbiOnOffFlag[-1])
if ((ADTOnOffFlag[-2] - ADTOnOffFlag[-1])>0):
count=not(count)
                    # Choose the k parameters for the current therapy state
                    # (presumably density-limiting capacities for the three
                    # cell types — the dydt death terms divide by them).
                    if (ADTOnOffFlag[-1] == 0):
                        k = [15000, 10000, 10000]
                    # If Abi is being given, then use Abi parameters.
                    elif (AbiOnOffFlag[-1] == 1):
                        k = [y[1] * 0.5, 100, 10000]
                    # If Abi is not being given, use naive parameters.
                    elif AbiOnOffFlag[-1] == 0:
                        k = [y[1] * 1.5, 10000, 10000]
                    # PSA (y[3]): produced by all three cell populations,
                    # cleared at rate sigmaPSA.
                    y[3]= y[3] + sum(y[0:3]) - sigmaPSA * y[3]
                    # Six reaction propensities: birth/death for each cell type.
                    dydt = np.zeros([6])
                    #T+ growth
                    dydt[0] = y[0] * r[0]
                    #T+ death
                    if k[0]== 0:
                        dydt[1]=0
                    else:
                        dydt[1] = y[0] * r[0] * ( ( y[0] + alphas[0] * y[1] + alphas[1] * y[2] ) / k[0] )
                    #TP growth
                    dydt[2] = y[1] * r[1]
                    #TP death
                    dydt[3] = y[1] * r[1] * ( ( alphas[2] * y[0] + y[1] + alphas[3] * y[2] ) / k[1] )
                    #T- growth
                    dydt[4] = y[2] * r[2]
                    #T- death
                    dydt[5] = y[2] * r[2] * ( ( alphas[4] * y[0] + alphas[5] * y[1] + y[2] ) / k[2] )
                    # Gillespie SSA: exponential waiting time, then pick one
                    # reaction proportionally to its propensity.
                    dt= -(1/sum(dydt))*np.log(np.random.uniform())
                    time.append(time[-1] + dt)
                    #reaction = np.argmin(dydt/sum(dydt)<np.random.uniform())
                    reaction = np.where(np.random.uniform()<=np.cumsum(dydt)/sum(dydt))[0][0]
                    # Apply the chosen birth (+1) or death (-1) event.
                    if reaction==0:
                        y[0]=y[0]+1
                    elif reaction==1:
                        y[0]=y[0]-1
                    elif reaction==2:
                        y[1]=y[1]+1
                    elif reaction==3:
                        y[1]=y[1]-1
                    elif reaction==4:
                        y[2]=y[2]+1
                    else:
                        y[2]=y[2]-1
########################################################################
if (y[2]<1):y[2]=1
########################################################################
allSolution.append(list(y))
allSolution=np.array(allSolution)
dicti[treatmentType][subtreatmentType][matrixIndex][np.argmax(y[0:3])]["count"]+=1
dicti[treatmentType][subtreatmentType][matrixIndex][np.argmax(y[0:3])]["PSA"]+=y[3]
dicti[treatmentType][subtreatmentType][matrixIndex][np.argmax(y[0:3])]["densities"]+=y[0:3]
if np.sum(np.logical_not(np.argmax(allSolution[:,0:3],1)==np.argmax(y[0:3])))>0:
dicti[treatmentType][subtreatmentType][matrixIndex][np.argmax(y[0:3])]["Time"]+=time[np.where(np.logical_not(np.argmax(allSolution[:,0:3],1)==np.argmax(y[0:3])))[0][-1]+1]
for cellType in range(3):
divide= dicti[treatmentType][subtreatmentType][matrixIndex][cellType]["count"]
if divide>0:
dicti[treatmentType][subtreatmentType][matrixIndex][cellType]["PSA"]/=divide
dicti[treatmentType][subtreatmentType][matrixIndex][cellType]["densities"]/=divide
dicti[treatmentType][subtreatmentType][matrixIndex][cellType]["Time"]/=divide
f = open("./nope/Run_forced_"+treatmentType+"_subtreat_"+str(subtreatmentType)+".txt", "w")
f.write(str(dicti))
f.close()
|
[
"noreply@github.com"
] |
mariandm.noreply@github.com
|
9cc040fe1ae166f3a782886247586b053353c86e
|
9c2b14de3cb746318aa17a67ba60fe0c48e45cba
|
/node_modules/mongodb/node_modules/bson/build/config.gypi
|
1c1ef6cad6e7fb06cb4beb87a958f8e311a81805
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
shreyaspurohit/SemanticLink
|
5f0510ce32c1639d161aed73151b9c315839f9bc
|
2135e5076cd39a34befc54be4e92e607d02ba8f4
|
refs/heads/master
| 2016-09-05T08:54:54.893557
| 2013-03-09T08:43:39
| 2013-03-09T08:43:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,815
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 0,
"gcc_version": 44,
"host_arch": "x64",
"node_install_npm": "true",
"node_install_waf": "true",
"node_prefix": "/",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_openssl": "true",
"target_arch": "x64",
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "false",
"nodedir": "/home/shreyaspurohit/.node-gyp/0.8.15",
"copy_dev_lib": "true",
"cache_lock_stale": "60000",
"pre": "",
"sign_git_tag": "",
"user_agent": "node/v0.8.15",
"always_auth": "",
"fetch_retries": "2",
"description": "true",
"init_version": "0.0.0",
"user": "1000",
"force": "",
"ignore": "",
"cache_min": "",
"editor": "vi",
"rollback": "true",
"cache_max": "null",
"userconfig": "/home/shreyaspurohit/.npmrc",
"init_author_url": "",
"yes": "",
"init_author_name": "",
"coverage": "",
"tmp": "/home/shreyaspurohit/tmp",
"userignorefile": "/home/shreyaspurohit/.npmignore",
"engine_strict": "",
"usage": "",
"depth": "null",
"save_dev": "",
"https_proxy": "",
"onload_script": "",
"rebuild_bundle": "true",
"save_bundle": "",
"shell": "/bin/bash",
"prefix": "/home/shreyaspurohit/.nvm/v0.8.15",
"registry": "https://registry.npmjs.org/",
"versions": "",
"searchopts": "",
"save_optional": "",
"cache_lock_wait": "10000",
"browser": "",
"cache": "/home/shreyaspurohit/.npm",
"version": "",
"searchsort": "name",
"npaturl": "http://npat.npmjs.org/",
"viewer": "man",
"color": "true",
"fetch_retry_mintimeout": "10000",
"umask": "18",
"fetch_retry_maxtimeout": "60000",
"message": "%s",
"global": "",
"link": "",
"unicode": "true",
"save": "",
"unsafe_perm": "true",
"long": "",
"production": "",
"node_version": "v0.8.15",
"tag": "latest",
"fetch_retry_factor": "10",
"username": "",
"proprietary_attribs": "true",
"npat": "",
"strict_ssl": "true",
"parseable": "",
"globalconfig": "/home/shreyaspurohit/.nvm/v0.8.15/etc/npmrc",
"init_module": "/home/shreyaspurohit/.npm-init.js",
"dev": "",
"globalignorefile": "/home/shreyaspurohit/.nvm/v0.8.15/etc/npmignore",
"cache_lock_retries": "10",
"init_author_email": "",
"searchexclude": "",
"group": "1000",
"optional": "true",
"git": "git",
"json": ""
}
}
|
[
"shreyaspurohit@shreyas-purohit-portable.(none)"
] |
shreyaspurohit@shreyas-purohit-portable.(none)
|
83fad54ec14446e17e20fd4c8fe1018398d8a8c6
|
7a49864e2de257ce2f0c0845c3875d701be45687
|
/shop/main_app/admin.py
|
4e5381125240659c38ed749c66a6c6ff86ba516f
|
[] |
no_license
|
PontificSalivan/Shop
|
be7b805edcbfb8c6300d42e3400dc22ed2235c4d
|
ab1404a6d06a918a13094567cb9006b22457455f
|
refs/heads/master
| 2023-03-03T02:36:35.462871
| 2021-02-12T17:57:42
| 2021-02-12T17:57:42
| 337,730,915
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,350
|
py
|
from PIL import Image
from django.forms import ModelChoiceField, ModelForm, ValidationError
from django.contrib import admin
from django.utils.safestring import mark_safe
from .models import *
class NotebookAdminForm(ModelForm):
    """Admin form for notebooks: warns that oversized images get downscaled."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Red warning rendered next to the image field in the admin UI.
        warning = mark_safe(
            '<span style="color:red;">При загрузке изображения с разрешением {}x{} оно будет сжато</span>'.format(
                *Product.MAX_RESOLUTION))
        self.fields['image'].help_text = warning
class SmartphoneAdminForm(ModelForm):
    """Admin form for smartphones.

    Adds the image-resize warning and disables the SD-card size field for
    instances without an SD slot; clean() drops the size when 'sd' is off.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        warning = mark_safe(
            '<span style="color:red;">При загрузке изображения с разрешением {}x{} оно будет сжато</span>'.format(
                *Product.MAX_RESOLUTION))
        self.fields['image'].help_text = warning
        instance = kwargs.get('instance')
        # Existing smartphones without an SD slot get a read-only size field.
        if instance and not instance.sd:
            self.fields['sd_volume_max'].widget.attrs.update({
                'readonly': True, 'style': 'background: lightgray;'
            })

    def clean(self):
        # No SD slot means no maximum SD volume either.
        if not self.cleaned_data['sd']:
            self.cleaned_data['sd_volume_max'] = None
        return self.cleaned_data
class NotebookAdmin(admin.ModelAdmin):
    """Admin for notebooks; limits the category FK choices to 'notebooks'."""

    form = NotebookAdminForm
    change_form_template = 'admin.html'

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Every FK except 'category' keeps the default queryset.
        if db_field.name != 'category':
            return super().formfield_for_foreignkey(db_field, request, **kwargs)
        return ModelChoiceField(Category.objects.filter(slug='notebooks'))
class SmartphoneAdmin(admin.ModelAdmin):
form = SmartphoneAdminForm
change_form_template = 'admin.html'
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == 'category':
return ModelChoiceField(Category.objects.filter(slug='smartphones'))
return super().formfield_for_foreignkey(db_field, request, **kwargs)
admin.site.register(Category)
admin.site.register(Notebook, NotebookAdmin)
admin.site.register(Smartphone, SmartphoneAdmin)
admin.site.register(CartProduct)
admin.site.register(Cart)
admin.site.register(Customer)
|
[
"rasen13@mail.ru"
] |
rasen13@mail.ru
|
63c36811234e4e17445c206a239feee4c4ae236e
|
e1c960d2eb1d9685acc5f72644302d711bc45dae
|
/sidproject/peristiwa/apps.py
|
8803951246cd784e9e1c832cfc9fc8494eb1e32b
|
[] |
no_license
|
prianthon/sidj
|
fbd3111f184423cce5032f02aa5dbcb56cd2705c
|
2c0639e6f96139010a81d1c651ed88763d2c81da
|
refs/heads/master
| 2021-01-11T09:21:49.771465
| 2016-12-24T05:28:20
| 2016-12-24T05:28:20
| 77,035,075
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 93
|
py
|
from django.apps import AppConfig
class PeristiwaConfig(AppConfig):
name = 'peristiwa'
|
[
"prianthonsubardio@gmail.com"
] |
prianthonsubardio@gmail.com
|
07f21dd8ea6c88a00921c3974f0b90fd6078e0e6
|
51888119e10cdff12dafb060a54824632edccf3f
|
/Folders/Python/Postage_old.py
|
f199f740817ab8b4db5d64cbd7f84eb3ac45c0ce
|
[
"BSD-2-Clause"
] |
permissive
|
kuchinal/lamakaha
|
b64511ad8c6d2b36da5a84a266b9e7a69acd3106
|
24e3b2ff53bcac2ad1c0e5a3b9afd4593d85f22d
|
refs/heads/master
| 2023-09-01T17:55:56.551183
| 2023-07-31T19:32:04
| 2023-07-31T19:32:04
| 182,849,747
| 0
| 0
| null | 2021-09-10T06:34:22
| 2019-04-22T19:00:02
|
Python
|
UTF-8
|
Python
| false
| false
| 400
|
py
|
import nuke
def Postage():
t= nuke.selectedNode()
o = t['label'].value()
x = t.xpos()
y = t.ypos()
u = nuke.nodes.PostageStamp(postage_stamp = 1, note_font_size = 20, tile_color = 4000,)
name = u['name'].value()
#p = name.replace('PostageStamp',o)
#nuke.toNode(p)
newname = u['label'].setValue(o)
u['hide_input'].setValue(1)
u.setInput(0,t)
u.setXYpos(x,y+100)
|
[
"lamakaha@gmail.com"
] |
lamakaha@gmail.com
|
48f890e38dcc694d1833976c10b34e0c952ef6b2
|
758d063e13238a58f5212e83208ca302163e017f
|
/tests/func_test.py
|
0664e4f147d26f5a55a303fc1bb4721614b87d2a
|
[] |
no_license
|
misinglink/NLP-parser
|
eadd8f0d190b22d2a34fd7956dbf706edd3bb0df
|
adca58815755bd915f54d43e61f7c7a010657799
|
refs/heads/main
| 2023-03-28T11:19:49.007413
| 2021-04-01T23:51:15
| 2021-04-01T23:51:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 382
|
py
|
import os
import pytest
from func import get_prompt_length, less_than_500
from gptparser import make_prompts
def test_propt_length1():
assert get_prompt_length(["a word is worth how much in integers?"]) == 8
def test_two_sents():
assert get_prompt_length(["two words", "three words", "six words"]) == 6
def prompts_less_than_500():
assert less_than_500() == True
|
[
"cesartole73@gmail.com"
] |
cesartole73@gmail.com
|
9ada37e2dc34abdb0cc7a1ea982d687c03ea3cc8
|
28873fdbfb51b7ae3105ff338d83f3d8e57a3e3b
|
/pytorch/jklhj/DCGANs/config.py
|
de8a48f9452edca41d6a94c1e4717422bcba2ea6
|
[] |
no_license
|
jklhj222/bin
|
7de7f54cb4d7a87053429efc3e2dd7438cb1475b
|
b92d6847648c43c5ccd2e348f50b0892e4913367
|
refs/heads/master
| 2023-04-14T14:49:06.702291
| 2023-03-24T13:15:29
| 2023-03-24T13:15:29
| 132,477,613
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 765
|
py
|
#!/usr/bin/env python3
class DefaultConfig():
use_gpu = True
num_workers = 2
input_size = 96
# for training setting
train_gpu_id = 0
train_netd_model = ''
train_netg_model = ''
train_dir = './data'
train_batch_size = 256
base_lrg = 2e-4
base_lrd = 2e-4
lr_decay_step = 100
g_train_every = 5
d_train_every = 1
max_epoch = 200
adam_beta1 = 0.5
nz = 100
ngf = 64
ndf = 64
show_iter = 50
save_iter = 600
pic_save_dir = './imgs'
# for testing setting
test_img = 'result.png'
test_netg_model = ''
test_netd_model = ''
# pick 64 best pictures of 512 generated pictures
gen_num = 64
gen_search_num = 512
gen_mean = 0
gen_std = 1
|
[
"jklhj222@gmail.com"
] |
jklhj222@gmail.com
|
f85bca6082b992766a03e4274459537f254a3989
|
67287d3ac86c7746d0bac770d79d9a0fd1e89587
|
/envs/myEnvs/CartPoleSwingUp/env/cartpole_swingup_continuous.py
|
f16f2628e994b24c91ca2285d36131f69273f7f7
|
[
"MIT"
] |
permissive
|
UCLA-StarAI/Analogous-Disentangled-Actor-Critic
|
1c37a94dcaac61a45123125f4757fd67ff10595b
|
37cb3ac7e39201a66b7e0b843dcc9926b97e0334
|
refs/heads/master
| 2021-07-15T07:24:50.197301
| 2021-03-18T03:10:04
| 2021-03-18T03:10:04
| 243,092,919
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,284
|
py
|
"""
Cart pole swing-up:
Adapted from:
hardmaru: https://github.com/hardmaru/estool/blob/master/custom_envs/cartpole_swingup.py
Original version from:
https://github.com/zuoxingdong/DeepPILCO/blob/master/cartpole_swingup.py
hardmaru's changes:
More difficult, since dt is 0.05 (not 0.01), and only 200 timesteps
"""
import gym
from gym import spaces
from gym.utils import seeding
import logging
import math
import numpy as np
logger = logging.getLogger(__name__)
class CartPoleSwingUpContinuousEnv(gym.Env):
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': 50
}
def __init__(self):
self.g = 9.82 # gravity
self.m_c = 0.5 # cart mass
self.m_p = 0.5 # pendulum mass
self.total_m = (self.m_p + self.m_c)
self.l = 0.6 # pole's length
self.m_p_l = (self.m_p * self.l)
self.force_mag = 10.0
self.dt = 0.01 # seconds between state updates
self.b = 0.1 # friction coefficient
self.t = 0 # timestep
self.t_limit = 1000
# Angle at which to fail the episode
self.theta_threshold_radians = 12 * 2 * math.pi / 360
self.x_threshold = 2.4
high = np.array([
np.finfo(np.float32).max,
np.finfo(np.float32).max,
np.finfo(np.float32).max,
np.finfo(np.float32).max,
np.finfo(np.float32).max])
self.action_space = spaces.Box(-1.0, 1.0, shape=(1,))
self.observation_space = spaces.Box(-high, high)
self.seed()
self.viewer = None
self.state = None
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, action):
# Valid action
action = np.clip(action, -1.0, 1.0)[0]
action *= self.force_mag
state = self.state
x, x_dot, theta, theta_dot = state
s = math.sin(theta)
c = math.cos(theta)
xdot_update = (-2 * self.m_p_l * (
theta_dot ** 2) * s + 3 * self.m_p * self.g * s * c + 4 * action - 4 * self.b * x_dot) / (
4 * self.total_m - 3 * self.m_p * c ** 2)
thetadot_update = (-3 * self.m_p_l * (theta_dot ** 2) * s * c + 6 * self.total_m * self.g * s + 6 * (
action - self.b * x_dot) * c) / (4 * self.l * self.total_m - 3 * self.m_p_l * c ** 2)
x = x + x_dot * self.dt
theta = theta + theta_dot * self.dt
x_dot = x_dot + xdot_update * self.dt
theta_dot = theta_dot + thetadot_update * self.dt
self.state = (x, x_dot, theta, theta_dot)
done = False
if x < -self.x_threshold or x > self.x_threshold:
done = True
self.t += 1
if self.t >= self.t_limit:
done = True
reward_theta = (np.cos(theta) + 1.0) / 2.0
reward_theta = reward_theta if reward_theta > 0.8 else 0.0
reward_x = np.cos((x / self.x_threshold) * (np.pi / 2.0))
reward = reward_theta * reward_x
obs = np.array([x, x_dot, np.cos(theta), np.sin(theta), theta_dot])
return obs, reward, done, {}
def reset(self):
# self.state = self.np_random.normal(loc=np.array([0.0, 0.0, 30*(2*np.pi)/360, 0.0]), scale=np.array([0.0, 0.0, 0.0, 0.0]))
self.state = np.random.normal(loc=np.array([0.0, 0.0, np.pi, 0.0]), scale=np.array([0.2, 0.2, 0.2, 0.2]))
self.steps_beyond_done = None
self.t = 0 # timestep
x, x_dot, theta, theta_dot = self.state
obs = np.array([x, x_dot, np.cos(theta), np.sin(theta), theta_dot])
return obs
def render(self, mode='human', close=False):
if close:
if self.viewer is not None:
self.viewer.close()
self.viewer = None
return
screen_width = 600
screen_height = 600 # before was 400
world_width = 5 # max visible position of cart
scale = screen_width / world_width
carty = screen_height / 2 # TOP OF CART
polewidth = 6.0
polelen = scale * self.l # 0.6 or self.l
cartwidth = 40.0
cartheight = 20.0
if self.viewer is None:
from gym.envs.classic_control import rendering
self.viewer = rendering.Viewer(screen_width, screen_height)
l, r, t, b = -cartwidth / 2, cartwidth / 2, cartheight / 2, -cartheight / 2
cart = rendering.FilledPolygon([(l, b), (l, t), (r, t), (r, b)])
self.carttrans = rendering.Transform()
cart.add_attr(self.carttrans)
cart.set_color(1, 0, 0)
self.viewer.add_geom(cart)
l, r, t, b = -polewidth / 2, polewidth / 2, polelen - polewidth / 2, -polewidth / 2
pole = rendering.FilledPolygon([(l, b), (l, t), (r, t), (r, b)])
pole.set_color(0, 0, 1)
self.poletrans = rendering.Transform(translation=(0, 0))
pole.add_attr(self.poletrans)
pole.add_attr(self.carttrans)
self.viewer.add_geom(pole)
self.axle = rendering.make_circle(polewidth / 2)
self.axle.add_attr(self.poletrans)
self.axle.add_attr(self.carttrans)
self.axle.set_color(0.1, 1, 1)
self.viewer.add_geom(self.axle)
# Make another circle on the top of the pole
self.pole_bob = rendering.make_circle(polewidth / 2)
self.pole_bob_trans = rendering.Transform()
self.pole_bob.add_attr(self.pole_bob_trans)
self.pole_bob.add_attr(self.poletrans)
self.pole_bob.add_attr(self.carttrans)
self.pole_bob.set_color(0, 0, 0)
self.viewer.add_geom(self.pole_bob)
self.wheel_l = rendering.make_circle(cartheight / 4)
self.wheel_r = rendering.make_circle(cartheight / 4)
self.wheeltrans_l = rendering.Transform(translation=(-cartwidth / 2, -cartheight / 2))
self.wheeltrans_r = rendering.Transform(translation=(cartwidth / 2, -cartheight / 2))
self.wheel_l.add_attr(self.wheeltrans_l)
self.wheel_l.add_attr(self.carttrans)
self.wheel_r.add_attr(self.wheeltrans_r)
self.wheel_r.add_attr(self.carttrans)
self.wheel_l.set_color(0, 0, 0) # Black, (B, G, R)
self.wheel_r.set_color(0, 0, 0) # Black, (B, G, R)
self.viewer.add_geom(self.wheel_l)
self.viewer.add_geom(self.wheel_r)
self.track = rendering.Line(
(screen_width / 2 - self.x_threshold * scale, carty - cartheight / 2 - cartheight / 4),
(screen_width / 2 + self.x_threshold * scale, carty - cartheight / 2 - cartheight / 4))
self.track.set_color(0, 0, 0)
self.viewer.add_geom(self.track)
if self.state is None: return None
x = self.state
cartx = x[0] * scale + screen_width / 2.0 # MIDDLE OF CART
self.carttrans.set_translation(cartx, carty)
self.poletrans.set_rotation(x[2])
self.pole_bob_trans.set_translation(-self.l * np.sin(x[2]), self.l * np.cos(x[2]))
return self.viewer.render(return_rgb_array=mode == 'rgb_array')
|
[
"anjiliu219@gmail.com"
] |
anjiliu219@gmail.com
|
7d88d46cc5c57870e82464a384294b667819297e
|
1fc11d568f5a409471c9ea06b1244f3354ef694e
|
/models/topic.py
|
4f150dbe50cf3d49a08df3ca63c6a074766b46b0
|
[] |
no_license
|
wanlikehandsome/bbs
|
022db63910eb55cb2be439ad8c0c631d02425c9f
|
58cbf900035fd4c227f0b66f07484ef0311c804a
|
refs/heads/master
| 2022-04-17T15:55:55.142185
| 2020-04-05T15:59:30
| 2020-04-05T15:59:30
| 255,565,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,263
|
py
|
import time
import json
from models import Model
from models.user import User
from models.mongua import Mongua
import logging
import os
import time
ogger = logging.getLogger("bbs")
class Cache(object):
def get(self, key):
pass
def set(self, key, value):
pass
class MemoryCache(Cache):
def __init__(self):
self.cache = {}
def get(self, key):
return self.cache[key]
def set(self, key, value):
self.cache[key] = value
class RedisCache(Cache):
import redis
redis_db = redis.StrictRedis(host='localhost', port=6379, db=0)
def set(self, key, value):
return RedisCache.redis_db.set(key, value)
def get(self, key):
return RedisCache.redis_db.get(key)
class Topic(Mongua):
__fields__ = Mongua.__fields__ + [
('content', str, ''),
('title', str, -1),
('user_id', int, -1),
('board_id', int, -1),
('views', int, 0)
]
should_update_all = True
# 1. memory cache
cache = MemoryCache()
# 2. redis cahce
redis_cache = RedisCache()
def to_json(self):
"""
将从MongoDB中查询到的对象转化为json格式
:return: json str
"""
d = dict()
for k in Topic.__fields__:
key = k[0]
if not key.startswith('_'):
# 过滤 _id
d[key] = getattr(self,key)
return json.dumps(d)
@classmethod
def from_json(cls, j):
"""
根据json格式的数据, 返回一个topic对象
:param j: josn
:return: topic object
"""
d = json.loads(j)
instance = cls()
for k, v in d.items():
setattr(instance, k, v)
return instance
@classmethod
def all_delay(cls):
return Topic.all()
@classmethod
def get(cls, id):
m = cls.find_by(id=id)
m.views += 1
m.save()
return m
def save(self):
super(Topic, self).save()
should_update_all = True
@classmethod
def cache_all(cls):
"""数据更新一次, 缓存更新一次
:return: topic list
"""
if Topic.should_update_all:
Topic.redis_cache.set('topic_all', json.dumps([i.to_json() for i in cls.all_delay()]))
Topic.should_update_all = False
j = json.loads(Topic.redis_cache.get('topic_all').decode('utf-8'))
j = [Topic.from_json(i) for i in j]
return j
@classmethod
def cache_find(cls, board_id):
"""数据更新一次, 缓存更新一次
:return: topic list
"""
j = json.loads(Topic.redis_cache.get('topic_all').decode('utf-8'))
j = [Topic.from_json(i) for i in j]
topics_in_board = []
for topic_object in j:
if topic_object.board_id == board_id:
topics_in_board.append(topic_object)
return topics_in_board
def replies(self):
from .reply import Reply
ms = Reply.find_all(topic_id=self.id)
return ms
def board(self):
from .board import Board
m = Board.find(self.board_id)
return m
def user(self):
u = User.find(id=self.user_id)
return u
|
[
"2563418641@qq.com"
] |
2563418641@qq.com
|
7565385a1a71e902a08e95a08b0e9204a95bb82e
|
9ad593596a4e19e5a172a350544a4d3c043dc742
|
/apps/dojo_secret/urls.py
|
19d8030c7ee3011fbed48e0600b0d5321b58278e
|
[] |
no_license
|
cryu1994/dojo-secret
|
df646cf62c4aef130f005b46d834ec70456d62e0
|
27db97939bdd1acbfb79f04401869e06ccbc2461
|
refs/heads/master
| 2021-01-20T00:48:00.231449
| 2017-04-24T12:04:10
| 2017-04-24T12:04:10
| 89,191,858
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 402
|
py
|
from django.conf.urls import url
from . import views
# from django.contrib import admin
urlpatterns = [
url(r'^$', views.index),
url(r'^register$', views.register),
url(r'^success$', views.success),
url(r'^users/login$', views.login),
url(r'^user/comments$', views.comment),
url(r'^like/(?P<comment_id>\d+)', views.like),
url(r'^unlike/(?P<comment_id>\d+)', views.like),
]
|
[
"Home@Homes-MacBook-Pro-2.local"
] |
Home@Homes-MacBook-Pro-2.local
|
81dc81454be87fff711af9a9957454ae444737a2
|
bc31f84cc452540c955096d5ae0cd104101c1efc
|
/code/utils/util.py
|
36176d0a9307f67c8579c60ca2d110452189f8e1
|
[] |
no_license
|
szWingLee/spsr-master
|
a4373182258b37128a2fe574603b011698d1cae6
|
f0dff12fe2ce255c41159d946c9adace22f6a474
|
refs/heads/master
| 2022-12-16T07:06:31.451596
| 2020-09-20T06:12:19
| 2020-09-20T06:12:19
| 297,013,495
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,730
|
py
|
import os
import math
from datetime import datetime
import numpy as np
import cv2
from torchvision.utils import make_grid
import random
import torch
import logging
####################
# miscellaneous
####################
def get_timestamp():
return datetime.now().strftime('%y%m%d-%H%M%S')
def mkdir(path):
if not os.path.exists(path):
os.makedirs(path)
def mkdirs(paths):
if isinstance(paths, str):
mkdir(paths)
else:
for path in paths:
mkdir(path)
def mkdir_and_rename(path):
if os.path.exists(path):
new_name = path + '_archived_' + get_timestamp()
print('Path already exists. Rename it to [{:s}]'.format(new_name))
logger = logging.getLogger('base')
logger.info('Path already exists. Rename it to [{:s}]'.format(new_name))
os.rename(path, new_name)
os.makedirs(path)
def set_random_seed(seed):
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
def setup_logger(logger_name, root, phase, level=logging.INFO, screen=False):
'''set up logger'''
l = logging.getLogger(logger_name)
formatter = logging.Formatter(
'%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s', datefmt='%y-%m-%d %H:%M:%S')
log_file = os.path.join(root, phase + '_{}.log'.format(get_timestamp()))
fh = logging.FileHandler(log_file, mode='w')
fh.setFormatter(formatter)
l.setLevel(level)
l.addHandler(fh)
if screen:
sh = logging.StreamHandler()
sh.setFormatter(formatter)
l.addHandler(sh)
####################
# image convert
####################
def tensor2img(tensor, out_type=np.uint8, min_max=(0, 1)):
'''
Converts a torch Tensor into an image Numpy array
Input: 4D(B,(3/1),H,W), 3D(C,H,W), or 2D(H,W), any range, RGB channel order
Output: 3D(H,W,C) or 2D(H,W), [0,255], np.uint8 (default)
'''
tensor = tensor.squeeze().float().cpu().clamp_(*min_max) # clamp
tensor = (tensor - min_max[0]) / (min_max[1] - min_max[0]) # to range [0,1]
n_dim = tensor.dim()
if n_dim == 4:
n_img = len(tensor)
img_np = make_grid(tensor, nrow=int(math.sqrt(n_img)), normalize=False).numpy()
img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0)) # HWC, BGR
elif n_dim == 3:
img_np = tensor.numpy()
img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0)) # HWC, BGR
elif n_dim == 2:
img_np = tensor.numpy()
else:
raise TypeError(
'Only support 4D, 3D and 2D tensor. But received with dimension: {:d}'.format(n_dim))
if out_type == np.uint8:
img_np = (img_np * 255.0).round()
# Important. Unlike matlab, numpy.unit8() WILL NOT round by default.
return img_np.astype(out_type)
def save_img(img, img_path, mode='RGB'):
cv2.imwrite(img_path, img)
####################
# metric
####################
def calculate_psnr(img1, img2):
# img1 and img2 have range [0, 255]
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
mse = np.mean((img1 - img2) ** 2)
if mse == 0:
return float('inf')
return 20 * math.log10(255.0 / math.sqrt(mse))
def ssim(img1, img2):
C1 = (0.01 * 255) ** 2
C2 = (0.03 * 255) ** 2
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
kernel = cv2.getGaussianKernel(11, 1.5)
window = np.outer(kernel, kernel.transpose())
mu1 = cv2.filter2D(img1, -1, window)[5:-5, 5:-5] # valid
mu2 = cv2.filter2D(img2, -1, window)[5:-5, 5:-5]
mu1_sq = mu1 ** 2
mu2_sq = mu2 ** 2
mu1_mu2 = mu1 * mu2
sigma1_sq = cv2.filter2D(img1 ** 2, -1, window)[5:-5, 5:-5] - mu1_sq
sigma2_sq = cv2.filter2D(img2 ** 2, -1, window)[5:-5, 5:-5] - mu2_sq
sigma12 = cv2.filter2D(img1 * img2, -1, window)[5:-5, 5:-5] - mu1_mu2
ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
(sigma1_sq + sigma2_sq + C2))
return ssim_map.mean()
def calculate_ssim(img1, img2):
'''calculate SSIM
the same outputs as MATLAB's
img1, img2: [0, 255]
'''
if not img1.shape == img2.shape:
raise ValueError('Input images must have the same dimensions.')
if img1.ndim == 2:
return ssim(img1, img2)
elif img1.ndim == 3:
if img1.shape[2] == 3:
ssims = []
for i in range(3):
ssims.append(ssim(img1, img2))
return np.array(ssims).mean()
elif img1.shape[2] == 1:
return ssim(np.squeeze(img1), np.squeeze(img2))
else:
raise ValueError('Wrong input image dimensions.')
|
[
"zirong_li@163.com"
] |
zirong_li@163.com
|
04397df6f9217cce1b6ef745433b2018f610d8de
|
64c3095772ec4a7c395f2db508b118e9aa9f7b24
|
/graphene_example/urls.py
|
b181effb67810659b59823b6a1e505edcdf479e9
|
[] |
no_license
|
rastukis/graphene-django-example
|
8d1c8b48d923d10d7af54d9d174bb32e8927f002
|
a9f3d5a04b0ca5632f9f930b2b88efb448d5a9e0
|
refs/heads/master
| 2022-06-20T12:56:00.374562
| 2018-11-19T19:38:40
| 2018-11-19T19:38:40
| 157,759,615
| 0
| 0
| null | 2022-05-25T00:23:12
| 2018-11-15T19:09:14
|
Python
|
UTF-8
|
Python
| false
| false
| 1,099
|
py
|
"""graphene_example URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.urls import path
from django.contrib import admin
from graphene_django.views import GraphQLView
from apps.posts.views import PrivateGraphQLView
urlpatterns = [
path('admin/', admin.site.urls),
# Para usar la autenticacion del usuario
#path('graphql/', PrivateGraphQLView.as_view(graphiql=True)),
# Sin autenticacion
url(r'^graphql', GraphQLView.as_view(graphiql=True)),
]
|
[
"mplascencia.cruz@gmail.com"
] |
mplascencia.cruz@gmail.com
|
068bace6606bcfda49ebc36b73ac984d7a1fadfd
|
5abce69e22007e34bc7625e862d9e082382446ff
|
/CommClusters/mod/gmm.py
|
8f6310fb0439845eb0ffdc1ac31ca4d059e2357d
|
[] |
no_license
|
zaqari/DataScideProjects
|
6ee5a64275ac2436e2e377239869355262a7672c
|
c8d9ad862159f659acf1079bd89bd058e7f28692
|
refs/heads/master
| 2023-06-15T07:51:59.911606
| 2021-07-11T22:24:40
| 2021-07-11T22:24:40
| 385,058,141
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,187
|
py
|
import torch
import torch.nn as nn
import numpy as np
from CommClusters.mod.sel import *
from sklearn.datasets import make_spd_matrix
class gMM():
def __init__(self, K, dims, eps=5e-3):
super(gMM, self).__init__()
self.means = None
self.vars = torch.cat([torch.FloatTensor(make_spd_matrix(dims)).unsqueeze(0) for _ in range(K)], dim=0)
self.k = K
self.eps = eps
self.dims = dims
self.pi = torch.FloatTensor([1/self.k for _ in range(self.k)])
def random_initialize(self, x):
idxs = np.random.choice(len(x), size=(self.k,), replace=False)
self.means = x[idxs].unsqueeze(0)
self.vars = self.covariance(x)
def seed_initialize(self, x, mu):
self.means = mu.unsqueeze(0)
self.vars = self.covariance(x)
def prob(self, x):
N = torch.distributions.MultivariateNormal(self.means, self.vars)
# return torch.exp(N.log_prob(x).T)
return torch.exp(N.log_prob(x).T)
def predict(self,x):
P = self.pi.unsqueeze(-1) * self.prob(x)
P = P / (P.sum(dim=0) + self.eps)
return P
def fit(self, x, epochs=1, mu=[]):
if len(mu) > 0:
self.seed_initialize(x, mu)
else:
self.random_initialize(x)
for ep in range(epochs):
# E-step
l = self.predict(x.unsqueeze(1))
# M-step
# calculating r-matrix
r = l * self.pi.unsqueeze(-1)
r = r/(r.sum(dim=0) + self.eps)
#Calculating means
self.means = (r.unsqueeze(-1) * x).sum(dim=1) / (r.sum(dim=-1).sum(dim=-1).view(-1,1) + self.eps)
self.means = self.means.unsqueeze(0)
#calculating update to covariance matrices
self.vars = self.covariance(x)
#Soft updating weights/pi per training epoch
self.pi = self.pi + (self.eps * (r.sum(dim=-1)/(r.sum() + self.eps)))
self.pi = self.pi/self.pi.sum()
# print(ep, self.pi, '\n===][===')
#WORKING ON ALGO from https://colab.research.google.com/drive/1Eb-G95_dd3XJ-0hm2qDqdtqMugLkSYE8#scrollTo=DrsHNw9L5fHc
def covariance(self, x):
l = self.predict(x.unsqueeze(1)) * self.pi.unsqueeze(-1)
l = (l/l.sum(dim=0)).unsqueeze(-1)
E = (x.unsqueeze(1) - self.means).transpose(0,1)
covar = (((l*E).transpose(-1,-2) @ E) / (l.sum(dim=1).unsqueeze(-1))) / (self.dims)
return torch.clamp(covar, min=self.eps)
class dGMM():
def __init__(self,K, dims, eps=5e-3, lr=5e-3):
super(dGMM, self).__init__()
self.k = K
self.dims = dims
self.eps = eps
self.lr = lr
self.means = None
self.vars = None
self.pi = torch.FloatTensor([1/self.k for _ in range(self.k)])
#################################################################
##### initialize values from scratch
#################################################################
def random_initialize(self, x):
idxs = np.random.choice(len(x), size=(self.k,), replace=False)
self.means = x[idxs]
E = (x.unsqueeze(1) - self.means) ** 2
self.vars = torch.rand(size=(self.k,self.dims)) * E.mean(dim=0)
def seed_initialize(self, x, mu):
self.means = mu
E = (x.unsqueeze(1) - self.means) ** 2
self.vars = torch.rand(size=mu.shape) * E.mean(dim=0) #(1/(x.shape[0]-1)) * E.sum(dim=0)
#################################################################
##### Probability and covariance calculations
#################################################################
def likelihood(self, x):
N = torch.distributions.Normal(self.means, self.vars)
return torch.exp(N.log_prob(x.unsqueeze(1)))
def covariance(self, x):
posterior = self.likelihood(x) * self.pi.view(1,-1,1)
posterior = posterior / (posterior.sum(dim=0) + self.eps)
#Updating covariance by normal means was leading to instability. Instead,
# I implement a bastard GIBBS samper to update covariance over time.
#(1) Calculate Error
E = (x.unsqueeze(1) - self.means)
#(2) Calculate the directionality of error vec
DIR = torch.ones(size=E.shape)
DIR = DIR * ((E < 0).float() * -1)
#(2) Find update amount
r = (E**2) * DIR
#(3) Update covariance by update amount * lr
self.vars = self.vars - (self.lr * (1/(r.shape[0]-1)) * r.sum(dim=0))
#################################################################
##### Fit model and predict outputs
#################################################################
def fit(self, x, epochs=1, mu=[]):
if len(mu) > 0:
self.seed_initialize(x, mu)
else:
self.random_initialize(x)
for ep in range(epochs):
####### E-STEP #######
l = self.likelihood(x)
####### M-STEP #######
r = l * self.pi.unsqueeze(-1)
r = r/(r.sum(dim=-1).unsqueeze(-1) + self.eps)
self.means = (r * x.unsqueeze(1)).sum(dim=0) / (r.sum(dim=0) + self.eps)
self.covariance(x)
self.pi = self.pi + (self.lr * r.sum(dim=0).sum(dim=-1) / (r.sum() + self.eps))
self.pi = self.pi/self.pi.sum()
def predict(self, x):
l = self.likelihood(x).sum(dim=-1)
return l #(l/l.sum(dim=0))
#################################################################
##### Save and load previous model versions
#################################################################
def save_weights(self, file):
torch.save({'k': self.k,
'dims': self.dims,
'eps' : self.eps,
'lr': self.lr,
'mu': self.means,
'covar': self.vars,
'pi': self.pi}, file)
def load_weights(self, file):
m = torch.load(file)
self.k, self.dims, self.eps, self.lr = m['k'], m['dims'], m['eps'], m['lr']
self.means, self.vars, self.pi = m['mu'], m['covar'], m['pi']
|
[
"zrosen@uci.edu"
] |
zrosen@uci.edu
|
684cdd7584314554e56bbb62b5727417812180d0
|
cd05fd22fc567700cc00a484b34faa8004fc597d
|
/自研究/pyqt学习/maindemo.py
|
f1cc6eab13fa60c518e8fc7a7cdee4fdb1d6b61d
|
[] |
no_license
|
kcc666/py
|
342647ed9c898933daa86b2cb4d4e7cbc233e9d4
|
4a27d18ac5c730e9203085c9f929c3bab3422c30
|
refs/heads/master
| 2021-07-25T09:59:39.814794
| 2020-08-18T02:21:32
| 2020-08-18T02:21:32
| 207,763,736
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 346
|
py
|
from PyQt5.Qt import *
class Window(QWidget):
def __init__(self):
super().__init__()
self.setWindowTitle("学习")
self.resize(500,500)
def setup_ui(self):
pass
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = Window()
window.show()
sys.exit(app.exec_())
|
[
"463217787@qq.com"
] |
463217787@qq.com
|
eea4371df9310b44093e939f0f47fa6ba184bcde
|
6d4a47b2533145e7f7e62b1c77c51916a0b27784
|
/ProjectFiles/Python/BattleShipGameFiles/EnemyShipAI.py
|
a5eaf18f4017be95af82647b1ad523af354471b2
|
[] |
no_license
|
srjamesjr/srjamesjr.github.io
|
4094176dabc426d198c1a29e42fdedcf99cff07d
|
5dc88c83daed1398edce7359b6f58967e0bd20d6
|
refs/heads/master
| 2020-04-25T15:15:06.526843
| 2019-11-21T05:16:58
| 2019-11-21T05:16:58
| 172,871,861
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,817
|
py
|
import random
def EnemyShipPlacement():
enemyLocation = [0, 0, 0, 0, 0] #ship, rotation, side, X, Y
FoeBoardMatrix = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # a grid represention and the Graphic board
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # 13 x 9
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
CursorTypeMinMax = [[[0, 8, 0, 11], [0, 7, 0, 12]],
[[0, 8, 0, 10], [0, 6, 0, 12]],
[[0, 8, 0, 10], [0, 6, 0, 12]],
[[0, 8, 0, 9], [0, 5, 0, 12]],
[[0, 8, 0, 8], [0, 4, 0, 12]],
[[0, 8, 0, 12], [0, 8, 0, 12]]] # [ship][rotation][y,y,x,x]
while enemyLocation[0] <= 4:
EnemyRotation = random.randint(0,1)
EnemyX = random.randint(0, CursorTypeMinMax[enemyLocation[0]][EnemyRotation][3])
EnemyY = random.randint(0, CursorTypeMinMax[enemyLocation[0]][EnemyRotation][1])
enemyLocation = [enemyLocation[0], EnemyRotation, 0, EnemyX, EnemyY] # ship, rotation, side, X, Y
#enemyLocation = [3, 0, 0, 9, 8]
if FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] == 0: # Base space empty
if enemyLocation[0] == 0: # ship 0
if enemyLocation[1] == 0: # Rotation 0
if FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 1 # make it not empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] = 1
# place your ship
enemyLocation[0] = 1 # go to next ship
elif enemyLocation[1] == 1: # Rotation 1
if FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 1 ##make it not empty
FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] = 1
# place your ship
enemyLocation[0] = 1 # go to next ship
elif enemyLocation[0] == 1: # Ship 1
if enemyLocation[1] == 0: # Rotation 0
if FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 2 # make it not empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] = 2
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] = 2
# place your ship
enemyLocation[0] = 2 # go to next ship
elif enemyLocation[1] == 1: # Rotation 1
if FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 2 ##make it not empty
FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] = 2
FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] = 2
# place your ship
enemyLocation[0] = 2 # go to next ship
elif enemyLocation[0] == 2: # Ship 2
if enemyLocation[1] == 0: # Rotation 0
if FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 3 ##make it not empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] = 3
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] = 3
# place your ship
enemyLocation[0] = 3 # go to next ship
elif enemyLocation[1] == 1: # Rotation 1
if FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 3 # make it not empty
FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] = 3
FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] = 3
# place your ship
enemyLocation[0] = 3 # go to next ship
elif enemyLocation[0] == 3: # Ship 3
if enemyLocation[1] == 0: # Rotation 0
if FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 3] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 4 # make it not empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] = 4
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] = 4
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 3] = 4
# place your ship
enemyLocation[0] = 4 # go to next ship
elif enemyLocation[1] == 1: # Rotation 1
if FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 3][enemyLocation[3]] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 4 ##make it not empty
FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] = 4
FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] = 4
FoeBoardMatrix[enemyLocation[4] + 3][enemyLocation[3]] = 4
# place your ship
enemyLocation[0] = 4 # go to next ship
elif enemyLocation[0] == 4: # Ship 4
if enemyLocation[1] == 0: # Rotation 0
if FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 3] == 0 and FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 4] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 5 # make it not empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 1] = 5
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 2] = 5
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 3] = 5
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3] + 4] = 5
# place your ship
enemyLocation[0] = 5 # go to target icon on foes board
enemyLocation[1] = 0
enemyLocation[2] = 0
elif enemyLocation[1] == 1: # Rotation 1
if FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 3][enemyLocation[3]] == 0 and FoeBoardMatrix[enemyLocation[4] + 4][enemyLocation[3]] == 0: # if shiplength is empty
FoeBoardMatrix[enemyLocation[4]][enemyLocation[3]] = 5 ##make it not empty
FoeBoardMatrix[enemyLocation[4] + 1][enemyLocation[3]] = 5
FoeBoardMatrix[enemyLocation[4] + 2][enemyLocation[3]] = 5
FoeBoardMatrix[enemyLocation[4] + 3][enemyLocation[3]] = 5
FoeBoardMatrix[enemyLocation[4] + 4][enemyLocation[3]] = 5
# place your ship
enemyLocation[0] = 5
enemyLocation[1] = 0
enemyLocation[2] = 0
return FoeBoardMatrix
def EnemyTargetingSystem(PlayerBoardMatrix, Cursor, ShipType, TestForWin, Player1ShipsDestroyed, right, backward):
    """Fire one AI shot at a random cell of the player's board.

    Board cell codes (as used throughout this file): 0 = empty, 1-5 = ship
    segment, 6 = hit, 7 = miss.  Re-rolls until an untargeted cell is found.

    Args:
        PlayerBoardMatrix: 9x13 list-of-lists holding the player's board state.
        Cursor: turtle-like object with shape()/stamp() used to draw the marker.
        ShipType: sprite list; index 1 is the miss marker, index 2 the hit marker.
        TestForWin: callback invoked after the shot to check for game over.
        Player1ShipsDestroyed: passed through to TestForWin.
        right, backward: zero-arg callbacks that move the cursor one cell.

    Returns:
        The (mutated) PlayerBoardMatrix.
    """
    # BUGFIX: the original recursed to retry already-targeted cells, which can
    # exhaust Python's recursion limit late in the game; loop instead.
    while True:
        EnemyX = random.randint(0, 12)
        EnemyY = random.randint(0, 8)
        # Walk the cursor to the chosen cell (repeated on each retry, matching
        # the original recursive behaviour).
        for _ in range(EnemyX):
            right()
        for _ in range(EnemyY):
            backward()
        cell = PlayerBoardMatrix[EnemyY][EnemyX]
        if 0 < cell < 6:  # ship segment -> hit
            Cursor.shape(ShipType[2])
            Cursor.stamp()
            PlayerBoardMatrix[EnemyY][EnemyX] = 6
            TestForWin("Player1", PlayerBoardMatrix, Player1ShipsDestroyed)
            return PlayerBoardMatrix
        elif cell == 0:  # empty water -> miss
            Cursor.shape(ShipType[1])
            Cursor.stamp()
            PlayerBoardMatrix[EnemyY][EnemyX] = 7
            TestForWin("Player1", PlayerBoardMatrix, Player1ShipsDestroyed)
            return PlayerBoardMatrix
        # cell is 6 or 7: already targeted, re-roll and try again.
|
[
"noreply@github.com"
] |
srjamesjr.noreply@github.com
|
0ce55e334a15902480268b7355fc7dfe902c439a
|
ab3deecc4de4e30cfbbb1173763126034ca76c83
|
/venv/lib/python3.7/collections/abc.py
|
7e605fa09413bcee5e2727939a003fe259a9262e
|
[] |
no_license
|
AnthonyLimo/BuildForSDGs-Demo
|
03def04795c90f2b51ef5c1e83a8674346eafc98
|
2cb40918cb882e96a79eaf064cf2052fe1b591eb
|
refs/heads/master
| 2022-12-08T04:24:52.282525
| 2020-09-03T15:18:04
| 2020-09-03T15:18:04
| 292,526,106
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 106
|
py
|
/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/collections/abc.py
|
[
"anthony.kiplimo@africastalking.com"
] |
anthony.kiplimo@africastalking.com
|
cf6bee7e32c961feb62eafd9e9a45399bdaf7ce4
|
27b6efd482e4feaf423583a5e9533e232404a340
|
/main.py
|
523be34a994d5761aff11d78884c24aa973c9ccd
|
[] |
no_license
|
kimduuukbae/FindLostItem
|
2b6909ccd32828281c02ba88e9fb64205ed4719e
|
dcc04798c6135e0f671c1ecfdd89aec5775071f7
|
refs/heads/master
| 2020-06-15T03:55:50.239636
| 2019-07-04T08:12:22
| 2019-07-04T08:12:22
| 195,197,425
| 0
| 0
| null | null | null | null |
UHC
|
Python
| false
| false
| 9,139
|
py
|
from urllib.request import urlopen
from urllib.parse import urlencode
import urllib
import xml.etree.ElementTree as ET
from tkinter import *
from tkinter import ttk
from enum import Enum
import smtplib
from email.mime.text import MIMEText
import telepot
import folium
import selenium.webdriver
import requests
# Telegram bot credentials and shared mutable state for the GUI + bot threads.
my_token = "key"  # NOTE(review): placeholder token — must be replaced with a real bot token
bot = telepot.Bot(token = my_token)
bot.getMe()  # sanity-check the token by fetching the bot's own profile
# Records fetched from the Seoul lost-article API; each entry is indexed by
# the TextType enum below (item name, date, spot, info, pickup location).
mylists = []
# Last pickup-location rendered to a map screenshot (avoids re-rendering).
saveitem = ""
class TextType(Enum):
    """Column indices of a record in the module-level `mylists` table."""
    LostItem = 0    # item name (API field GET_NAME)
    LostDay = 1     # registration date (REG_DATE)
    LostSpot = 2    # where it was found (GET_GOOD)
    LostInfo = 3    # free-text description (GET_THING)
    LostTakeId = 4  # pickup location / office (TAKE_ID)
def getMaps(string):
    """Geocode an address with the Google Maps API.

    Returns (lat, lng); both are 0 when the API finds no result.
    """
    query = urllib.parse.urlencode({
        'address': string,
        'sensor': 'false',
        'language' : 'ko',
        'key' : 'key'
    })
    data = requests.get('https://maps.google.com/maps/api/geocode/json?' + query).json()
    lat, lng = 0, 0
    if data['status'] != 'ZERO_RESULTS':
        location = data['results'][0]['geometry']['location']
        lat = location['lat']
        lng = location['lng']
    return lat, lng
def handle(msg):
    """Telegram callback: reply with every stored record matching the text."""
    global bot
    content_type, chat_type, chat_id = telepot.glance(msg)
    if content_type != 'text':
        bot.sendMessage(chat_id, "텍스트만 보내새유")
        return
    query = msg['text']
    bot.sendMessage(chat_id, query + ' 정보에 대해 알려드릴게요. ')
    matches = 0
    for record in mylists:
        if record[TextType.LostItem.value].find(query) != -1:
            matches += 1
            bot.sendMessage(chat_id, "분실 물건 : " + record[TextType.LostItem.value] + " 분실 날짜 : " + record[TextType.LostDay.value] + " 분실 장소 : " + record[TextType.LostSpot.value])
    if matches == 0:
        bot.sendMessage(chat_id, "찾는 내용이 없습니다.")
class MyTk:
    """Tk GUI for browsing Seoul's lost-article open API.

    Builds the whole window in __init__, logs into Gmail SMTP for the
    "send by e-mail" feature, starts the Telegram bot loop, and then
    blocks in the Tk main loop.  NOTE(review): the SMTP login and the
    API count fetch both happen at construction time, so the constructor
    fails if either service is unreachable.
    """
    def __init__(self):
        self.root = Tk()
        self.root.title('분실물 찾기 서비스')
        self.root.geometry('600x800')
        # Result list on the left; double-click shows details (see selection()).
        self.mylist = Listbox(self.root, selectmode='extended')
        self.mylist.place(x=20, y=50, width=200, height=400)
        self.strings = StringVar()   # search keyword
        self.emailadd = StringVar()  # recipient e-mail address
        self.textbox = ttk.Entry(self.root, textvariable=self.strings)
        self.textbox.place(x=20, y=5, width=200)
        self.textbox2 = ttk.Entry(self.root, textvariable = self.emailadd)
        self.textbox2.place(x=20, y =470, width = 400)
        self.searchButton = Button(self.root, text ="검색", overrelief="solid", command=self.getList, repeatdelay=1000, repeatinterval=100)
        self.searchButton.place(x=230, y=5, width=50, height=20)
        self.sendButton = Button(self.root, text = "전송", overrelief = "solid", command=self.sendButtonAction)
        self.sendButton.place(x =480, y = 470, width = 50, height =20)
        self.clearButton = Button(self.root, text = "초기화", overrelief = "solid", command = self.clear)
        self.clearButton.place (x = 290, y = 5, width = 70, height = 20)
        # Detail panes: date, item, spot, description, and map screenshot.
        self.Label1 = Label(self.root, text="", relief='solid')
        self.Label1.place(x=250, y=80, width=300, height=40)
        self.Label2 = Label(self.root, text="", relief='solid')
        self.Label2.place(x=250, y=150, width=300, height=40)
        self.Label3 = Label(self.root, text="", relief='solid')
        self.Label3.place(x=250, y=220, width=300, height=40)
        self.Label4 = Label(self.root, text="", relief='solid', wraplength = 300)
        self.Label4.place(x=250, y=300, width=300, height=150)
        self.Label5 = Label(self.root, text="", relief='solid')
        self.Label5.place(x=20, y=530, width=560, height=250)
        self.Label6 = Label(self.root, text="최근 0건 검색")
        self.Label6.place(x = 20, y = 30)
        self.Label7 = Label(self.root, text="", justify = 'center')
        self.Label7.place(x=150, y=510, width = 100, height = 20)
        self.mylist.bind("<Double-Button-1>", self.selection)
        # Paging state: the API is fetched newest-first in windows of rows
        # (start..end); each search button press advances one window back.
        self.allcount = self.getCount()
        self.end = self.allcount
        self.start = self.end - 401
        self.yIdx = 0
        self.plan = 10
        self.selectNum = 0
        self.allViewNum = 0
        self.savestring = ""
        staticText = Label(self.root, text="분실 날짜")
        staticText.place(x = 360, y = 60, width = 80, height = 20)
        staticText2 = Label(self.root, text="분실 물건")
        staticText2.place(x=360, y=130, width=80, height=20)
        staticText3 = Label(self.root, text="분실 장소")
        staticText3.place(x=360, y=200, width=80, height=20)
        staticText4 = Label(self.root, text="분실 정보")
        staticText4.place(x=360, y=280, width=80, height=20)
        staticText5 = Label(self.root, text = "이메일 전송")
        staticText5.place(x=20, y = 450, width=70, height=20)
        staticText6 = Label(self.root, text="분실물 수령 위치 : ")
        staticText6.place(x=20, y=510, width=110, height=20)
        # SMTP session kept open for the lifetime of the window.
        # NOTE(review): credentials are placeholders and must be configured.
        self.s = smtplib.SMTP('smtp.gmail.com',587)
        self.s.starttls()
        self.s.login('eMail', 'passWord')
        self.msg = MIMEText('내용: 본문 내용 테스트')
        self.msg['Subject'] = '제목: 메일 보내기 테스트'
        bot.message_loop(handle)
        self.root.mainloop()
    def clear(self):
        """Reset the result list and paging back to the newest window."""
        self.end = self.allcount
        # NOTE(review): uses a 201-row window here vs. 401 elsewhere — confirm.
        self.start = self.end - 201
        self.savestring =""
        self.mylist.delete(0, self.mylist.size())
        self.Label6.config(text="최근 0건 검색")
    def sendButtonAction(self):
        """E-mail the currently selected record to the entered address."""
        global mylists
        receive = self.textbox2.get()
        listargs = mylists[self.selectNum]
        msg = MIMEText('분실 날짜 : ' + listargs[TextType.LostDay.value] + '\n' + '분실 물건 : ' + listargs[TextType.LostItem.value] + '\n' + '분실 장소 : ' + listargs[TextType.LostSpot.value] + '\n' + '분실 정보 : \n' + listargs[TextType.LostInfo.value])
        msg['Subject'] = "LostItem"
        self.s.sendmail("eMail", receive,msg.as_string())
        # NOTE(review): quitting the SMTP session here means a second send
        # on the same window will fail — confirm intended.
        self.s.quit()
    def selection(self, event):
        """Double-click handler: show record details and a map screenshot.

        Renders the pickup location with folium, screenshots it via a
        headless PhantomJS browser, and caches the last rendered location
        in the module-level `saveitem`.
        """
        global mylists
        global saveitem
        listargs = mylists[event.widget.curselection()[0]]
        self.selectNum = event.widget.curselection()[0]
        self.Label1.config(text = listargs[TextType.LostDay.value])
        self.Label2.config(text = listargs[TextType.LostItem.value])
        self.Label3.config(text=listargs[TextType.LostSpot.value])
        self.Label4.config(text=listargs[TextType.LostInfo.value])
        if saveitem != listargs[TextType.LostTakeId.value]:
            lat,lng = getMaps(listargs[TextType.LostTakeId.value])
            if lat != 0 and lng != 0:
                a = folium.Map(location=[lat,lng], zoom_start=15)
                folium.Marker([lat,lng]).add_to(a)
                a.save("save.html")
                self.driver = selenium.webdriver.PhantomJS('phantomjs')
                self.driver.set_window_size(500, 200)
                self.driver.get('save.html')
                self.driver.save_screenshot('screenshot.png')
                photo = PhotoImage(file="screenshot.png")
                self.Label5.config(image = photo)
                saveitem = listargs[TextType.LostTakeId.value]
                self.Label7.config(text = saveitem)
            else:
                self.Label5.config(text="회사 정보를 읽어올 수 없습니다.", image = None)
                self.Label7.config(text='')
        else:
            return  # same location as last time: keep the cached screenshot
    def getCount(self):
        """Return the total number of records the lost-article API reports."""
        testCase = "http://openapi.seoul.go.kr:8088/key/xml/lostArticleInfo/1/1/"
        return int(ET.ElementTree(file=urllib.request.urlopen(testCase)).getroot().findtext('list_total_count'))
    def getList(self):
        """Fetch the next window of records matching the search keyword.

        Skips records already collected ("수령") or with customer-service
        placeholder names, appends matches to the module-level `mylists`,
        and advances the paging window one step toward older records.
        """
        url = "http://openapi.seoul.go.kr:8088/key/xml/lostArticleInfo/" + str(
            self.start) + "/" + str(self.end) + "/"
        tree = ET.ElementTree(file=urllib.request.urlopen(url))
        root = tree.getroot()
        # A changed keyword restarts the search from the newest window.
        if self.savestring != self.strings.get():
            self.savestring = self.strings.get()
            self.mylist.delete(0, self.mylist.size())
            self.allViewNum = 0
            self.end = self.allcount
        for a in root.findall('row'):
            condition = a.findtext('GET_NAME')
            if a.findtext('STATUS') == "수령" or condition.find("고객") != -1 or condition.find("연락") != -1 or condition.find(self.strings.get()) == -1:
                continue
            if len(mylists) <= self.yIdx:
                mylists.append([])
            # Record layout matches the TextType enum indices.
            mylists[self.yIdx].append(a.findtext('GET_NAME'))
            mylists[self.yIdx].append(a.findtext('REG_DATE'))
            mylists[self.yIdx].append(a.findtext('GET_GOOD'))
            mylists[self.yIdx].append(a.findtext('GET_THING').replace("<br>", "\n"))
            mylists[self.yIdx].append(a.findtext('TAKE_ID'))
            self.mylist.insert(self.yIdx, mylists[self.yIdx][TextType.LostItem.value])
            self.yIdx += 1
        self.allViewNum += self.end - self.start - 1
        self.Label6.config(text="최근 {0}건 검색".format(self.allViewNum))
        self.end = self.start
        self.start = self.end - 401
        self.plan += 10
# Launch the application; the constructor blocks in the Tk main loop.
ab = MyTk()
|
[
"39338850+kimduuukbae@users.noreply.github.com"
] |
39338850+kimduuukbae@users.noreply.github.com
|
16850e52f133d71fefd06f2429f65610e2cdaf0f
|
ab19c3757766f00c7414aa10641f8c7e6321375d
|
/h.py
|
93769797074788cc1586538c20c074f4ac20bd00
|
[] |
no_license
|
HHariHHaran/python-programming
|
2c70ff0c4b24ae48b8096075a29ffc0edfe1ef00
|
c2db869e352d7ee22d499dd772f5cb2285b2822f
|
refs/heads/master
| 2020-04-19T09:19:56.918989
| 2019-01-22T09:50:28
| 2019-01-22T09:50:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 66
|
py
|
# Python 2 script: read an integer N and print "hello world" N times.
A = int(raw_input())  # NOTE(review): raw_input/xrange/print-statement are Python-2-only
for i in xrange(A):
    print "hello world"
|
[
"noreply@github.com"
] |
HHariHHaran.noreply@github.com
|
08c724928dd870fb1914f496ecb1e8eed4e0ac31
|
0b12e31cafa598c163d2cc53706df193a73e31e3
|
/pro/urls.py
|
007cf0e8282f4b0d91c9e0b96e3c5297850817bf
|
[] |
no_license
|
getopen/pro
|
6a4dba774558e1de0419a4c6daf030ee360d68fd
|
97e939d26d9fdaf54f05f3cd4a9b32a6722d0ac3
|
refs/heads/master
| 2021-07-06T09:35:18.077577
| 2017-09-30T16:07:06
| 2017-09-30T16:07:06
| 100,471,872
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 936
|
py
|
"""pro URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
# Route table; patterns are tried in order. The two r'^' includes both match
# every path prefix, so question's patterns are consulted before people's.
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^', include('question.urls', namespace='question')),
    url(r'^', include('people.urls', namespace='user')),
    url(r'^sites/', include('sites.urls', namespace='sites')),
]
|
[
"zhuoqun527@qq.com"
] |
zhuoqun527@qq.com
|
b29528a81fe99fcf38ac6da0494cc7091dd75dcb
|
cba4a94cb34c0d5305146f9cab9caeebc40ab11a
|
/lib/miband2time.py
|
48b23e27652c543e704bd87560b1f1399eaedf2b
|
[] |
no_license
|
4m1g0/PyBand2
|
73f0687a498dc448200baeda4cf87962826a14c7
|
06bd92d790576a3794279040976a80bb8d2fcc17
|
refs/heads/master
| 2020-03-22T05:28:30.153367
| 2018-07-02T15:15:15
| 2018-07-02T15:15:15
| 139,568,415
| 1
| 0
| null | 2018-07-03T10:40:38
| 2018-07-03T10:40:38
| null |
UTF-8
|
Python
| false
| false
| 4,101
|
py
|
import calendar
import datetime
import struct
class MiBand2Time:
    """A date/time value exchanged with a Mi Band 2 over BLE.

    Two wire precisions are supported, inferred from the constructor
    arguments: "min" (year..minute) and "sec" (year..second + weekday).
    `dst` and `tz` trail every serialized value.
    """
    def __init__(self, device, year, month, day, hour, min, sec=None, weekday=None, dst=0, tz=4):
        self.device = device
        self.year = year
        self.month = month
        self.day = day
        self.hour = hour
        self.min = min
        self.sec = sec
        if self.sec is not None:
            self.precision = "sec"
            if weekday is not None:
                self.weekday = weekday
            else:
                # Monday == 1, matching the band's convention.
                self.weekday = calendar.weekday(self.year, self.month, self.day) + 1
        else:
            self.precision = "min"
        self.dst = dst
        self.tz = tz

    def toDatetime(self):
        """Return a naive datetime at minute precision (seconds dropped)."""
        return datetime.datetime(self.year, self.month, self.day, self.hour, self.min)

    def getBytes(self, honorOffset=False):
        """Serialize to the band's wire format.

        When honorOffset is true, device.sleepOffset is added to the hour
        (used to trick the band into recording sleep out of schedule).

        Raises:
            ValueError: if precision is neither "min" nor "sec".
        """
        hour = self.hour
        # BUGFIX: the original did `datetime[3] += self.sleepOffset`, which
        # indexes the datetime *module* and reads a nonexistent attribute.
        if honorOffset and self.device.sleepOffset != 0:
            hour += self.device.sleepOffset
        if self.precision == 'min':
            dateBytes = struct.pack('<H4B', self.year, self.month, self.day, hour, self.min)
        elif self.precision == 'sec':
            dateBytes = struct.pack('<H7B', self.year, self.month, self.day, hour, self.min, self.sec, self.weekday, 0)
        else:
            raise ValueError('Precision can only be min or sec, got {0}'.format(self.precision))
        # Trailing DST flag and timezone byte (datetimes here are tz-unaware).
        tail = struct.pack('2B', self.dst, self.tz)
        return dateBytes + tail

    @staticmethod
    def dateBytesToDatetime(device, datetime):
        """Deserialize wire bytes: 8-byte minute form or 11-byte second form.

        Raises:
            ValueError: for any other length.
        """
        if (len(datetime) == 8):
            dtm = struct.unpack('<H4B', datetime[0:6])
            tz = struct.unpack('<2B', datetime[6:8])
            # BUGFIX: the original passed an undefined `self` as first argument.
            mbDate = MiBand2Time(device, dtm[0], dtm[1], dtm[2], dtm[3], dtm[4], dst=tz[0], tz=tz[1])
        elif (len(datetime) == 11):
            dtm = struct.unpack('<H7B', datetime[0:9])
            tz = struct.unpack('<2B', datetime[9:11])
            mbDate = MiBand2Time(device, dtm[0], dtm[1], dtm[2], dtm[3], dtm[4], sec=dtm[5], weekday=dtm[6], dst=tz[0], tz=tz[1])
        else:
            raise ValueError('Unsupported DatetimeBytes length {0}'.format(len(datetime)))
        return mbDate

    def toMinPrecision(self):
        """Drop second-level fields, demoting this value to minute precision."""
        self.precision = "min"
        self.sec = None
        self.weekday = None

    def toSecPrecision(self, sec, weekday):
        """Promote this value to second precision with the given sec/weekday."""
        self.precision = "sec"
        self.sec = sec
        self.weekday = weekday

    def addMinutes(self, minutes):
        """Return a new MiBand2Time `minutes` later, carrying through
        hours/days/months/years."""
        tmp_sec = self.sec if self.sec is not None else 0
        # BUGFIX: use floor division; the original relied on Python 2's
        # integer `/`, which produces floats (and wrong results) on Python 3.
        tmp_min = self.min + minutes + tmp_sec // 60
        tmp_hour = self.hour + tmp_min // 60
        tmp_day = (self.day + tmp_hour // 24) - 1
        monthdays = calendar.monthrange(self.year, self.month)[1]
        tmp_month = (self.month + tmp_day // monthdays) - 1
        tmp_year = self.year + tmp_month // 12
        # BUGFIX: the original passed `self` where `device` belongs.
        if self.precision == "sec":
            tmp_weekday = calendar.weekday(tmp_year, tmp_month % 12 + 1, tmp_day % monthdays + 1) + 1
            return MiBand2Time(self.device, tmp_year, tmp_month % 12 + 1, tmp_day % monthdays + 1, tmp_hour % 24, tmp_min % 60, tmp_sec % 60, weekday=tmp_weekday, dst=self.dst, tz=self.tz)
        else:
            return MiBand2Time(self.device, tmp_year, tmp_month % 12 + 1, tmp_day % monthdays + 1, tmp_hour % 24, tmp_min % 60, dst=self.dst, tz=self.tz)

    def minutesUntilNow(self):
        """Approximate minutes from this value to now (30-day months, no tz)."""
        now = datetime.datetime.now()
        years = now.year - self.year
        months = now.month - self.month
        days = now.day - self.day
        hours = now.hour - self.hour
        minutes = now.minute - self.min
        return years*365*24*60 + months*30*24*60 + days*24*60 + hours*60 + minutes

    def __str__(self):
        ret = "{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}".format(self.year, self.month, self.day, self.hour, self.min)
        if self.precision == "sec":
            ret += ":{0:02}".format(self.sec, self.weekday)
        return ret
|
[
"trigork@gmail.com"
] |
trigork@gmail.com
|
96aa6cddb47242fd1fd40905f7a76afab1cc358a
|
a34ed23c7aea6f363d589055057092ec65d96597
|
/mapstory/settings/__init__.py
|
9c925ff4c80ca5db5e7f87fea6fdec58bf02f168
|
[] |
no_license
|
jsiochi/mapstory-geonode
|
471bfdbb4a0a79a4fe2f17addaec355d73fb078f
|
41e1caccbdeb1cdeabdd3f03cc14af72d768d9da
|
refs/heads/master
| 2021-01-17T21:37:24.578904
| 2014-09-09T17:01:25
| 2014-09-09T17:01:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,774
|
py
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
# Django settings for the GeoNode project.
import os
from geonode.settings import *
#
# General Django development settings
#
SITENAME = 'MapStory'
# Defines the directory that contains the settings file as the LOCAL_ROOT
# It is used for relative settings elsewhere.
LOCAL_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
WSGI_APPLICATION = "mapstory.wsgi.application"
# Additional directories which hold static files; ours is searched first.
STATICFILES_DIRS.insert( 0,
    os.path.join(LOCAL_ROOT, "static"),
)
STATIC_ROOT = os.path.join(LOCAL_ROOT, "static_root")
MEDIA_ROOT = os.path.join(LOCAL_ROOT, "uploaded")
# Note that Django automatically includes the "templates" dir in all the
# INSTALLED_APPS, so there is no need to add maps/templates or admin/templates
TEMPLATE_DIRS = (
    os.path.join(LOCAL_ROOT, "templates"),
) + TEMPLATE_DIRS
# Location of url mappings
ROOT_URLCONF = 'mapstory.urls'
# Location of locale files
LOCALE_PATHS = (
    os.path.join(LOCAL_ROOT, 'locale'),
) + LOCALE_PATHS
# Defines settings for development (overridden below when a database
# password is supplied by local_settings).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(LOCAL_ROOT, 'development.db'),
    },
}
INSTALLED_APPS += (
    'mapstory',
    'django.contrib.webdesign',
    'geonode.contrib.geogit'
)
# GeoServer backend configuration consumed by GeoNode.
OGC_SERVER = {
    'default' : {
        'BACKEND' : 'geonode.geoserver',
        'LOCATION' : 'http://localhost:8080/geoserver/',
        # PUBLIC_LOCATION needs to be kept like this because in dev mode
        # the proxy won't work and the integration tests will fail
        # the entire block has to be overridden in the local_settings
        'PUBLIC_LOCATION' : 'http://localhost:8000/geoserver/',
        'USER' : 'admin',
        'PASSWORD' : 'geoserver',
        'MAPFISH_PRINT_ENABLED' : True,
        'PRINT_NG_ENABLED' : True,
        'GEONODE_SECURITY_ENABLED' : True,
        'GEOGIT_ENABLED' : True,
        'WMST_ENABLED' : False,
        'BACKEND_WRITE_ENABLED': True,
        'WPS_ENABLED' : True,
        # Set to name of database in DATABASES dictionary to enable
        'DATASTORE': '', #'datastore',
        'TIMEOUT': 10 # number of seconds to allow for HTTP requests
    }
}
#@todo remove this hack once maploom can deal with other config
MAP_BASELAYERS = [
    {
        "source": {
            "ptype": "gxp_wmscsource",
            "url": OGC_SERVER['default']['PUBLIC_LOCATION'] + "wms",
            "restUrl": "/gs/rest",
            "name": "local geoserver"
        }
    },
    {
        "source": {"ptype": "gxp_osmsource", "name": "OpenStreetMap"},
        "type": "OpenLayers.Layer.OSM",
        "name": "mapnik",
        "title": "OpenStreetMap",
        "args": ["OpenStreetMap"],
        "visibility": True,
        "fixed": True,
        "group":"background"
    }
]
DEBUG_STATIC = True
REMOTE_CONTENT_URL = 'http://mapstory.dev.boundlessgeo.com/MapStoryOrg/images'
DATABASE_PASSWORD = None
# Pull in machine-specific overrides when present; local_settings may set
# DATABASE_PASSWORD (and anything else) into this module's globals.
if os.path.exists('mapstory/settings/local_settings.py'):
    # BUGFIX: `exec open(f) in globals()` is Python-2-only statement syntax;
    # the call form below is valid on both Python 2 and Python 3.
    exec(open('mapstory/settings/local_settings.py').read(), globals())

# A configured password switches from the sqlite dev setup to PostgreSQL
# plus a PostGIS datastore, and enables the importer/geogit upload path.
if DATABASE_PASSWORD:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'mapstory',
            'USER': 'mapstory',
            'PASSWORD': DATABASE_PASSWORD,
            'HOST' : 'localhost',
            'PORT' : '5432',
        },
        'datastore' : {
            'ENGINE': 'django.contrib.gis.db.backends.postgis',
            'NAME': 'mapstory_data',
            'USER' : 'mapstory',
            'PASSWORD' : DATABASE_PASSWORD,
            'HOST' : 'localhost',
            'PORT' : '5432',
        }
    }
    OGC_SERVER['default']['DATASTORE'] = 'datastore'
    UPLOADER = {
        'BACKEND': 'geonode.importer',
        'OPTIONS': {
            'TIME_ENABLED': True,
            'GEOGIT_ENABLED': True,
        }
    }
USE_BIG_DATE = True
GEOGIT_DATASTORE_NAME = 'geogit'
|
[
"planablediglet@gmail.com"
] |
planablediglet@gmail.com
|
0a69d80d2290b6f3660527d5d070753aadf125cd
|
ec6e842aa3ae88e576fb6dec291f0d068ea42fea
|
/sloth/annotation_containers/darknet.py
|
55e0491b8dd4f9c99c48f08dc085f39223239e40
|
[] |
no_license
|
PalouseRobosub/vision_dev
|
697b84c6200718b1c3c8821dcebf487513ef5501
|
cb30bf9918cecb57e44b72e69592789c27115046
|
refs/heads/master
| 2021-01-23T15:59:16.183537
| 2018-06-06T21:50:56
| 2018-06-06T21:50:56
| 93,279,883
| 1
| 1
| null | 2020-07-22T01:40:48
| 2017-06-03T23:50:39
|
Python
|
UTF-8
|
Python
| false
| false
| 6,342
|
py
|
from sloth.annotations.container import AnnotationContainer
import os
import struct
import imghdr
class DarknetContainer(AnnotationContainer):
    """Reads/writes sloth annotations in a darknet-style text format.

    File layout:
        ---
        <class name> : <index>
        ---
        >> "<image path relative to this file>"
        <class idx> <x_center> <y_center> <width> <height>   # normalized 0..1
        <<
    """

    def get_image_size(self, fname):
        """Return (width, height) of a png/gif/jpeg file, or None on failure.

        Only header bytes are read; the image is never fully decoded.
        """
        try:
            with open(fname, 'rb') as fhandle:
                head = fhandle.read(24)
                if len(head) != 24:
                    return None
                kind = imghdr.what(fname)
                if kind == 'png':
                    # Second half of the PNG signature must match before
                    # trusting the IHDR dimensions.
                    check = struct.unpack('>i', head[4:8])[0]
                    if check != 0x0d0a1a0a:
                        return None
                    width, height = struct.unpack('>ii', head[16:24])
                elif kind == 'gif':
                    width, height = struct.unpack('<HH', head[6:10])
                elif kind == 'jpeg':
                    try:
                        # Walk JPEG segments until a SOFn marker (0xC0..0xCF),
                        # which carries the frame dimensions.
                        fhandle.seek(0)
                        size = 2
                        ftype = 0
                        while not 0xc0 <= ftype <= 0xcf:
                            fhandle.seek(size, 1)
                            byte = fhandle.read(1)
                            while ord(byte) == 0xff:
                                byte = fhandle.read(1)
                            ftype = ord(byte)
                            size = struct.unpack('>H', fhandle.read(2))[0] - 2
                        fhandle.seek(1, 1)  # skip the precision byte
                        height, width = struct.unpack('>HH', fhandle.read(4))
                    except Exception:
                        return None
                else:
                    return None
                return width, height
        except IOError:
            return None

    def parseFromFile(self, fname):
        """Parse a darknet annotation file into sloth annotation dicts.

        Bugfixes vs. the original: parsed labels are actually appended to
        each image's annotation list; numeric fields are converted from
        strings; the width/height field order matches serializeToFile; and
        normalized box centers are converted back to top-left pixel
        coordinates (the exact inverse of serializeToFile).
        """
        labels = {}
        annotations = []
        parentDir = os.path.split(fname)[0] + "/"
        with open(fname, "r") as f:
            # Header: "<name> : <index>" pairs between two '---' lines.
            while True:
                line = f.readline().rstrip()
                if '---' in line and len(labels) > 0:
                    break
                elif '---' not in line:
                    name, _, idx = line.partition(":")
                    labels[int(idx)] = name.strip()
            tmp = {}
            for line in f:
                if ">>" in line:
                    _, filename = line.split(" ", 1)
                    tmp["filename"] = filename.strip()[1:-1]  # drop quotes
                    tmp["class"] = "image"
                    tmp["annotations"] = []
                elif "<<" in line:
                    if not tmp['annotations']:
                        tmp["unlabeled"] = True
                    annotations.append(tmp)
                    tmp = {}
                elif line.strip():
                    data = line.split(" ")
                    size = self.get_image_size(parentDir + tmp['filename'])
                    if size is None:
                        # Image unreadable: discard this image's labels.
                        tmp = {}
                        continue
                    width = float(data[3]) * size[0]
                    height = float(data[4]) * size[1]
                    tmp["annotations"].append({
                        "class": labels[int(data[0])],
                        "width": width,
                        "height": height,
                        # darknet stores centers; sloth uses top-left corners.
                        "x": float(data[1]) * size[0] - width / 2.0,
                        "y": float(data[2]) * size[1] - height / 2.0,
                    })
        return annotations

    def serializeToFile(self, fname, annotations):
        """Write annotations to `fname` in the darknet format (see class doc)."""
        parentDir = os.path.split(fname)[0]
        parentDir = parentDir + ("/" if parentDir else "")
        with open(fname, "w") as f:
            # Collect class names in first-seen order; their list index is
            # the darknet class id written in the header and per box.
            labels = []
            for an in annotations:
                for l in an['annotations']:
                    if l['class'] not in labels:
                        labels.append(l['class'])
            f.write("---\n")
            for i, item in enumerate(labels):
                f.write("{} : {}\n".format(item, i))
            f.write("---\n")
            for an in annotations:
                f.write(">> \"" + an['filename'] + "\"\n")
                size = self.get_image_size(parentDir + an['filename'])
                for label in an['annotations']:
                    dw = 1.0 / size[0]
                    dh = 1.0 / size[1]
                    # Convert top-left + extent to normalized center + extent.
                    x = ((label['width'] / 2.0) + label['x']) * dw
                    y = ((label['height'] / 2.0) + label['y']) * dh
                    w = label['width'] * dw
                    h = label['height'] * dh
                    f.write("{} {} {} {} {}\n".format(labels.index(label['class']), x, y, w, h))
                f.write("<<\n")
        return
|
[
"seanp@kallaher.org"
] |
seanp@kallaher.org
|
45743441ca09f232e46a89061c07576fb6fc06f8
|
af30f87a267495b204e5b5cc5be8f9244bb77747
|
/afib_diagnosis/venv/Scripts/django-admin.py
|
fbf52d82cf4a49bc54383da34610a0950ea17c1f
|
[] |
no_license
|
hollandsean/FYP-Web
|
bc9517507f8e78f8b02044f7225e6ae5b453de26
|
e179e872d590bc5d94ac1e8b11775c044d66e3a1
|
refs/heads/master
| 2020-03-17T19:22:53.467343
| 2018-05-17T20:24:10
| 2018-05-17T20:24:10
| 131,615,966
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 204
|
py
|
#!C:\Users\sean\Documents\FOURTH_YEAR_SES_TWO\PROJECT\AFIB\afib_diagnosis\venv\Scripts\python.exe
# Virtualenv-generated django-admin entry point: delegates to Django's CLI.
from django.core import management
if __name__ == "__main__":
    management.execute_from_command_line()
|
[
"sean.holland@mycit.ie"
] |
sean.holland@mycit.ie
|
3aceae95ee0d6b94bb559db47039e4e77469f3b2
|
09478b6d8a1b785067a550fab4d3ef6445c18f58
|
/Flask/06.10/employee.py
|
d0766fce71ce8467a4607d5f89fbbde65bfb35a0
|
[] |
no_license
|
Mikeyc85/QA-Training
|
a8c003d9dfe9ccde11bcce8753ae1d8815c89406
|
ef33a8b7d2a3430fd0847de434fec370f5460a62
|
refs/heads/main
| 2023-08-13T04:21:30.457583
| 2021-10-13T19:50:00
| 2021-10-13T19:50:00
| 406,368,004
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,816
|
py
|
from flask import Flask, render_template,request,redirect
import mysql.connector

# Single shared connection/cursor for the whole app (dev-only: neither is
# thread-safe; NOTE(review): credentials are hard-coded for local use).
db=mysql.connector.connect(host="localhost",user="root",password="root",database="employees",auth_plugin='mysql_native_password')
cursor=db.cursor()
app = Flask(__name__)

@app.route("/")
def homepage():
    """List every employee record on the home page."""
    cursor.execute("select * from personalInfo")
    data = cursor.fetchall()
    return render_template("Homepage.html", records=data)

@app.route("/personalInfo_entry")
def personalinfo_entry():
    """Show the blank new-employee form."""
    return render_template("personalinfoform.html")

@app.route("/personalInfo_save", methods=["POST"])
def personalinfo_save():
    """Insert a new employee with the next free empno, then redirect home.

    SECURITY FIX: the original interpolated form fields straight into the
    SQL string (SQL injection); all queries now use bound parameters.
    """
    cursor.execute("select ifnull(max(empno),0)+1 from personalInfo")
    newempno = cursor.fetchone()
    name = request.form["name"]
    department = request.form["department"]
    address = request.form["address"]
    country = request.form["country"]
    cursor.execute(
        "insert into personalInfo values(%s,%s,%s,%s,%s)",
        (newempno[0], name, department, address, country),
    )
    db.commit()
    return redirect("/")

@app.route("/deleteemployee/<eno>")
def deleteemployee(eno):
    """Delete the employee with the given empno (parameterized; eno is untrusted)."""
    cursor.execute("delete from personalInfo where empno=%s", (eno,))
    db.commit()
    return redirect("/")

@app.route("/editemployee/<empno>")
def editemployee(empno):
    """Show the edit form pre-filled with one employee's record."""
    cursor.execute("select * from personalInfo where empno=%s", (empno,))
    data = cursor.fetchone()
    return render_template("personalinfoedit.html", record=data)

@app.route("/personalInfo_edit", methods=["POST"])
def personalinfo_edit():
    """Apply the edit form's name/address changes, then redirect home."""
    cursor.execute(
        "update personalInfo set empname=%s,address=%s where empno=%s",
        (request.form["name"], request.form["address"], request.form["empno"]),
    )
    db.commit()
    return redirect("/")

@app.route("/countryvizlist/<country>")
def countryvizlist(country):
    """List and count employees from one country."""
    cursor.execute("select * from personalInfo where country=%s", (country,))
    listofemployees = cursor.fetchall()
    cursor.execute("select count(*) from personalInfo where country=%s", (country,))
    numberofemployees = cursor.fetchone()
    msg = "List of Employees from " + country
    return render_template("SpecificList.html", records=listofemployees, number=numberofemployees[0], message=msg)

@app.route("/departmentlist/<dept>")
def deptlist(dept):
    """List and count employees working in one department."""
    cursor.execute("select * from personalInfo where department=%s", (dept,))
    listofemployees = cursor.fetchall()
    cursor.execute("select count(*) from personalInfo where department=%s", (dept,))
    numberofemployees = cursor.fetchone()
    msg = "List of Employees working in " + dept
    return render_template("SpecificList.html", records=listofemployees, number=numberofemployees[0], message=msg)

app.run(debug=True)
|
[
"noreply@github.com"
] |
Mikeyc85.noreply@github.com
|
7841a010dbd4becdaddb5035acd0c8bf901d297e
|
22940905e1622b06670a585956007569b74ce306
|
/DeepLearningApp.py
|
f9b400abf39e64edc6d433bdbf5acb81458938bb
|
[] |
no_license
|
DuongQuocVuong97/streamlit
|
3cc883daa11ee1c3437f6dc1e81b4a4c27d2df73
|
54feb11b68ee9a81ab1162a1fec792db4b611cd4
|
refs/heads/main
| 2023-07-10T21:50:16.653967
| 2021-08-24T08:00:40
| 2021-08-24T08:00:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,895
|
py
|
# Import Dependancies
from datetime import date
import yfinance as yf
from plotly import graph_objs as go
import streamlit as st
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.metrics import mean_squared_error
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Sequential, layers, callbacks
from tensorflow.keras.layers import Dense, LSTM, Dropout, GRU, Bidirectional
import warnings
warnings.filterwarnings('ignore')
st.set_option('deprecation.showPyplotGlobalUse', False)
# Data Collection
START = "2005-01-01"
TODAY = date.today().strftime("%Y-%m-%d")
st.title('Stock Market Deep Learning App')
stocks = ('^BSESN','GOOG', 'AAPL', 'MSFT', 'GME')
selected_stock = st.selectbox('Select dataset for prediction', stocks)
n_years = st.slider('Months of prediction:', 3, 5)
period = n_years * 365
@st.cache
def load_data(ticker):
    """Download daily price history for `ticker` from Yahoo Finance.

    Cached by Streamlit; the date range is the module-level START..TODAY.
    """
    data = yf.download(ticker, START, TODAY)
    data.reset_index(inplace=True)  # expose Date as a regular column for plotting
    return data
data_load_state = st.text('Loading data...')
data = load_data(selected_stock)
data_load_state.text('Loading data... done!')
train_dates = pd.to_datetime(data['Date'])
cols = list(data)[1:6]
df_for_training = data[cols].astype(float)
st.subheader('Raw data')
st.write(data.tail())
# Plot raw data
def plot_raw_data():
    """Plot the module-level `data`'s Open/Close series with a range slider."""
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=data['Date'], y=data['Open'], name="stock_open"))
    fig.add_trace(go.Scatter(x=data['Date'], y=data['Close'], name="stock_close"))
    fig.layout.update(title_text='Time Series data with Rangeslider', xaxis_rangeslider_visible=True)
    st.plotly_chart(fig)
plot_raw_data()
# st.subheader('Data Cleaning')
# Check for missing values
# with st.echo():
#Check for Missing Values
df = data.loc[:,['Date','Close']]
(df.Close.isna().sum())
df_missing_date = df.loc[df.Close.isna() == True]
df_missing_date.loc[:, ['Date']]
# Replcase missing value with interpolation
df.Close.interpolate(inplace=True)
df = df.drop('Date', axis=1)
# st.subheader('Data Transformation')
# with st.echo():
# Split train data and test data
whole_data = int(len(df) * 1)
train_size = int(len(df) * 0.8)
# Use iloc to select a number of rows
train_data = df.iloc[:train_size]
test_data = df.iloc[train_size:]
# with st.echo():
# Scale the data
# The input to scaler.fit -> array-like, sparse matrix, dataframe of shape (n_samples, n_features)
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler().fit(train_data)
train_scaled = scaler.transform(train_data)
test_scaled = scaler.transform(test_data)
# with st.echo():
# Create input dataset
# Th input shape should be [samples, time steps, features]
def create_dataset(X, look_back=1):
    """Build sliding-window samples for the LSTM.

    Each sample is `look_back` consecutive rows of X; its target is the
    row immediately after the window.  Returned shapes are
    (n-look_back, look_back, features) and (n-look_back, features).
    """
    n_samples = len(X) - look_back
    windows = [X[start:start + look_back] for start in range(n_samples)]
    targets = [X[start + look_back] for start in range(n_samples)]
    return np.array(windows), np.array(targets)
# Build 30-day look-back windows for train and test splits.
X_train, y_train = create_dataset(train_scaled, 30)
X_test, y_test = create_dataset(test_scaled, 30)
# X_train.shape
# y_train.shape
# X_test.shape
# y_test.shape
#
# X_test[:50].shape
# Hyperparameter controls rendered in the Streamlit sidebar.
st.sidebar.title('Hyperparameters')
n_neurons = st.sidebar.slider('Neurons', 1, 100, 50)
l_rate = st.sidebar.selectbox('Learning Rate', (0.0001, 0.001, 0.01), 1)
n_epochs = st.sidebar.number_input('Number of Epochs', 1, 50, 20)
st.subheader('Build the Bidirectional Deep Learning Model & Fit the Model')
# with st.echo():
# Import dependencies (kept mid-script as in the original layout)
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Sequential, layers, callbacks
from tensorflow.keras.layers import Dense, LSTM, Dropout, GRU, Bidirectional
# Build the model: a single bidirectional LSTM feeding one linear output.
model = Sequential()
# Input layer
model.add(Bidirectional(LSTM(n_neurons, activation='relu', return_sequences=False),
                        input_shape=(X_train.shape[1], X_train.shape[2])))
# Hidden layer
# model.add(Bidirectional(LSTM(n_neurons)))
# Output Layer: one value — the predicted (scaled) closing price.
model.add(Dense(1, activation='linear', name='Close'))
# with st.echo():
# Compile the model with Adam at the user-selected learning rate;
# early stopping halts training after 10 epochs without val_loss improvement.
opt = keras.optimizers.Adam(l_rate)
model.compile(optimizer=opt, loss='mse', metrics=['mse'])
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
train = st.button('Train Model')
if train:
    with st.spinner('Training Model…'):
        with st.echo():
            model.summary(print_fn=lambda x: st.write('{}'.format(x)))
            history = model.fit(
                X_train,
                y_train,
                epochs=n_epochs,
                validation_split=0.2,batch_size=16, shuffle=False,callbacks=[early_stop]
            )
    st.success('Model Training Complete!')
# NOTE(review): everything below references `history` and the trained model,
# which exist only after the Train button was pressed; on a fresh Streamlit
# rerun this raises NameError — confirm the intended flow.
y_test = scaler.inverse_transform(y_test)
y_train = scaler.inverse_transform(y_train)
st.subheader('Plot the Model Loss')
# with st.echo():
st.line_chart(pd.DataFrame(history.history))
st.subheader('Making Model Predictions on Test Data & New Data Set')
# with st.echo():
X_new = X_test
predictions = model.predict(X_new)
predictions = scaler.inverse_transform(predictions)
# NOTE(review): the next two bare expressions display nothing outside
# st.echo/notebook context — confirm before removing.
predictions
y_test[:10]
# with st.echo():
# Plot test data vs prediction with matplotlib, rendered via st.pyplot.
plt.figure(figsize=(10, 6))
range_future = len(predictions)
plt.plot(np.arange(range_future), np.array(y_test), label='Test data')
plt.plot(np.arange(range_future), np.array(predictions), label='Prediction')
plt.title('Test data vs prediction')
plt.legend(loc='upper left')
plt.xlabel('Time (day)')
plt.ylabel('Daily Closing Price of Stock')
st.pyplot()
# with st.echo():
# Make a new test set: 60 days sliced out of the held-out test data.
new_data = test_data.iloc[100:160]
# Scale the input with the already-fitted scaler.
scaled_data = scaler.transform(new_data)
# Reshape the input
def create_dataset(X, look_back=1):
    """Return every length-`look_back` sliding window of X as one numpy array.

    Overrides the earlier create_dataset: this variant builds model inputs
    only (no targets), for running the trained model on unseen data.

    Output shape: [len(X) - look_back, look_back, n_features].
    """
    window_starts = range(len(X) - look_back)
    windows = [X[start:start + look_back] for start in window_starts]
    return np.array(windows)
X_30 = create_dataset(scaled_data, 30)
# with st.echo():
# Predict on the new data and map back to price scale.
predictions1 = model.predict(X_30)
predictions1 = scaler.inverse_transform(predictions1)
st.subheader('Evaluate The Model Performance')
# with st.echo():
# Calculate MAE and RMSE on the test-set predictions.
errors = predictions - y_test
mse = np.square(errors).mean()
rmse = np.sqrt(mse)
mae = np.abs(errors).mean()
# NOTE(review): bare expressions — display nothing outside st.echo; confirm.
mae
rmse
# Second model: refit a scaler on `df_for_training`.
# NOTE(review): `df_for_training` and (below) `train_dates` are not defined in
# this portion of the script — presumably created earlier; confirm.
scaler = MinMaxScaler()
scaler = scaler.fit(df_for_training)
df_for_training_scaled = scaler.transform(df_for_training)
trainX = []
trainY = []
n_future = 1  # Number of days we want to predict into the future
n_past = 14  # Number of past days we want to use to predict the future
# Build (window, next-day first-feature) training pairs.
for i in range(n_past, len(df_for_training_scaled) - n_future + 1):
    trainX.append(df_for_training_scaled[i - n_past:i, 0:df_for_training.shape[1]])
    trainY.append(df_for_training_scaled[i + n_future - 1:i + n_future, 0])
trainX, trainY = np.array(trainX), np.array(trainY)
# Fresh bidirectional-LSTM model for the multi-feature forecast.
model = Sequential()
model.add(Bidirectional(LSTM(50, activation='relu', return_sequences=False),
                        input_shape=(trainX.shape[1], trainX.shape[2])))
model.add(Dense(1))
model.compile(optimizer='adam', loss='mean_squared_error', metrics=['mse'])
model.summary()
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
# fit model
history = model.fit(trainX, trainY, epochs=25, batch_size=16, validation_split=0.1, verbose=1, shuffle=False,
                    callbacks=[early_stop])
n_future = 90  # Redefining n_future to extend prediction dates beyond original n_future dates...
forecast_period_dates = pd.date_range(list(train_dates)[-1], periods=n_future, freq='1d').tolist()
forecast = model.predict(trainX[-n_future:])  # forecast
# Repeat the single predicted column across all features so the fitted
# multi-feature scaler can invert it; keep only column 0 afterwards.
forecast_copies = np.repeat(forecast, df_for_training.shape[1], axis=-1)
y_pred_future = scaler.inverse_transform(forecast_copies)[:, 0]
# Convert timestamp to date
forecast_dates = []
for time_i in forecast_period_dates:
    forecast_dates.append(time_i.date())
df_forecast = pd.DataFrame({'Date': np.array(forecast_dates), 'Close': y_pred_future})
df_forecast['Date'] = pd.to_datetime(df_forecast['Date'])
with st.echo():
    df_forecast
# Historical closes since April 2020 for the comparison chart.
# NOTE(review): `original` is a slice of `data` — assigning into it may raise
# pandas' SettingWithCopyWarning; confirm a .copy() is not needed.
original = data[['Date', 'Close']]
original['Date'] = pd.to_datetime(original['Date'])
original = original.loc[original['Date'] >= '2020-4-1']
st.subheader('PREDICTING THE FUTURE')
# with st.echo():
def plot_future_data():
    """Render historical closes alongside the model forecast in one Plotly chart."""
    chart = go.Figure()
    for frame, label in ((original, "Historical Trend"), (df_forecast, "Forecast")):
        chart.add_trace(go.Scatter(x=frame['Date'], y=frame['Close'], name=label))
    chart.layout.update(title_text='Future Price Direction', xaxis_rangeslider_visible=True)
    st.plotly_chart(chart)
plot_future_data()
|
[
"noreply@github.com"
] |
DuongQuocVuong97.noreply@github.com
|
19980c6f1fb1f32f16324e451c5252e77f2d892d
|
484b03da7d3eabc8887e0f5a6be6c2287b0ebcc4
|
/fanta/fantaapp/models/models_old.py
|
927ccbdc96583b931e3d572b048ff91e843cad8b
|
[] |
no_license
|
abenassen/holyfootball
|
4ad3d025692f4dd0d7d5b09f8a190f131da4ca57
|
cd41a44b33276b8ff1a5d3d50f944f526826abd5
|
refs/heads/master
| 2020-12-03T00:20:13.079415
| 2017-09-02T09:50:23
| 2017-09-02T09:50:23
| 96,015,986
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 47,956
|
py
|
# Names of the cup's knockout rounds, ordered from the final backwards;
# the last entry is the preliminary round.
nome_turni_coppa = [u'Finale', u'Semifinale', u'Quarti', u'Ottavi', u'Sedicesimi', u'Trentaduesimi', u'Sessantaquattresimi', u'Turno Preliminare']
# Default fantasy rating awarded to a "primavera" (youth) placeholder, per role code.
votoprimavera = {'P': 3.5, 'D':4.5, 'C':4.5, 'A':4.5}
# One-letter role code -> full Italian role name (singular / plural).
ruoli_lunghi = {'P': "Portiere", 'D':"Difensore", 'C':"Centrocampista", 'A':"Attaccante"}
ruoli_lunghi_plurali = {'P': "Portieri", 'D':"Difensori", 'C':"Centrocampisti", 'A':"Attaccanti"}
def randomHash():
    """Return a 12-character alphanumeric code drawn from a CSPRNG.

    Used as the callable default for unique league invitation codes
    (Lega.codice).
    """
    alphabet = string.ascii_letters + string.digits
    rng = random.SystemRandom()
    return ''.join([rng.choice(alphabet) for _ in range(12)])
def lastCampionato():
    """Return the primary key of the most recently created Campionato.

    Used as the callable default for Lega.campionato.
    """
    newest = Campionato.objects.latest('id')
    return newest.id
class Redazione(models.Model):
    """Editorial board whose player ratings/roles a league subscribes to."""
    nome = models.CharField(max_length=50)  # display name of the board
    descrizione = models.TextField()  # free-text description of how the board works
    def __unicode__(self):
        return self.nome
class Campionato(models.Model):
    """A real national championship, e.g. Serie A, La Liga."""
    nome = models.CharField(max_length=50, unique=True)  # championship name
    datainizio = models.DateField()  # approximate start date of the championship
    datafine = models.DateField()  # approximate end date of the championship
    totale_giornate = models.PositiveIntegerField(default=38)  # total number of match days
    def __unicode__(self):
        return self.nome
    def giornate_disputate(self):
        """Return how many match days have been played so far (0 if none).

        NOTE(review): filters a BooleanField with the string 'True'; Django
        coerces it, but a bare True would be clearer — confirm before changing.
        """
        try:
            val = self.giornata_set.filter(disputata='True').latest('numero').numero
        except ObjectDoesNotExist:
            val = 0
        return val
    def giornate_da_disputare(self):
        """Return how many match days are still to be played."""
        return self.totale_giornate - self.giornate_disputate()
class Lega(models.Model):
nome = models.CharField(max_length=50) # nome della lega
descrizione = models.TextField(blank=True) # descrizione della lega
numeropartecipanti = models.PositiveIntegerField(default=10, validators=[MaxValueValidator(20)]) # descrizione della lega
codice = models.CharField(max_length=20, default=randomHash, unique = True, db_index=True)
calcolo_voto = models.CharField(max_length=100, default='votostd') # contiene il riferimento alla funzione di un oggetto della classe Voto per il calcolo del voto, inserita nel modulo funzioni_voto.py
budgetiniziale = models.PositiveIntegerField(default=1000)
numeroportieri = models.PositiveIntegerField(default=3)
numerodifensori = models.PositiveIntegerField(default=8)
numerocentrocampisti = models.PositiveIntegerField(default=8)
numeroattaccanti = models.PositiveIntegerField(default=6)
con_coppa = models.BooleanField(default=True)
redazione = models.ForeignKey(Redazione)
campionato = models.ForeignKey(Campionato, default=lastCampionato)
numero_gironi = models.PositiveIntegerField(default=4)
def get_absolute_url(self):
return reverse('aprilega', kwargs={'legahash': self.codice})
def nuovo_allenatore(self, utente, is_amministratore=False):
if (self.allenatore_set.count() >= self.numeropartecipanti):
raise ValueError("Lega gia' completa!")
return Allenatore(lega=self, utente=utente, amministratore=is_amministratore, budget=self.budgetiniziale,
numeroportieri=self.numeroportieri,numerodifensori=self.numerodifensori,
numerocentrocampisti=self.numerocentrocampisti,numeroattaccanti=self.numeroattaccanti)
def limite_ruolo(self, ruolo):
"""restituisce il numero di calciatori per rosa in un dato ruolo dato da 'P','D'... """
return getattr(self, 'numero'+ ruoli_lunghi_plurali[ruolo].lower())
@property
def limite_tesserati(self):
tot = 0
for r in ruoli_lunghi.keys():
tot = tot + self.limite_ruolo(r)
return tot
def totale_giornate(self):
return self.numero_gironi*(self.numeropartecipanti-1)
def genera_calendario(self):
"""genera tutti gli accoppiamenti del torneo"""
if (self.giornatalega_set.count()>0):
raise ValueError("Il set delle giornate non e' vuoto! " + str(self.giornatalega_set.count()) + " incontri gia' presenti")
if ((self.numeropartecipanti - 1)*self.numero_gironi > self.campionato.giornate_da_disputare):
self.numero_gironi = math.floor(self.campionato.giornate_da_disputare/(self.numeropartecipanti - 1))
if (self.numero_gironi==0):
raise ValueError('Non ci sono sufficienti giornate nel campionato per generare almeno un girone.')
allenatori = list(self.allenatore_set.all()) #ottengo la lista di allenatori della lega
if len(allenatori)<self.numeropartecipanti: # se gli allenatori iscritti alla lega sono meno del numero di partecipanti fissato per la lega, la lega e' incompleta
raise ValueError("Numero di allenatori non sufficiente a riempire la lega: %d." % (len(allenatori)))
random.shuffle(allenatori) # mischio gli allenatori
rr = roundRobin(allenatori) # genero gli accoppiamenti di tutte le giornate
if (self.numero_gironi%2 == 0): # se il numero di gironi e' pari tengo conto di casa e fuori casa
rr1 = [[ (x[1], x[0]) for x in giornata] for giornata in rr]
accoppiamenti = (rr+rr1)*(self.numero_gironi/2)
else:
accoppiamenti = rr*self.numero_gironi
for g, giornata in enumerate(accoppiamenti):
giornata_new = GiornataLega(lega=self,numero=(g+1))
giornata_new.save()
for acc in giornata:
if(acc[0] is None or acc[1] is None): # e' un incontro fittizio in cui una squadra riposa
continue
inc = IncontroLega(allenatorecasa=acc[0], allenatoretrasferta=acc[1], lega=self, giornatalega=giornata_new)
inc.save()
def genera_coppa(self):
if (self.giornatalega_set.count() < self.totale_giornate()):
raise ValueError("Un numero insufficiente di giornate e' presente. Avvia la generazione del calendario prima.")
if (self.incontrocoppa_set.count()>0):
raise ValueError("Il set delle giornate non e' vuoto! " + str(self.incontrocoppa_set.count()) + " incontri gia' presenti")
allenatori = list(self.allenatore_set.all()) #ottengo la lista di allenatori della lega
if len(allenatori)<self.numeropartecipanti: # se gli allenatori iscritti alla lega sono meno del numero di partecipanti fissato per la lega, la lega e' incompleta
raise ValueError("Numero di allenatori non sufficiente a riempire la lega: %d." % (len(allenatori)))
random.shuffle(allenatori) # mischio gli allenatori
numero_turni = int(math.log(self.numeropartecipanti,2)) # numero turni da disputare, piu' eventualmente il preliminare
turni_da_disputare = numero_turni
numero_partite = 2*(numero_turni - 1) + 1 # i turni sono andata e ritorno tranne la finale
turno_prec = []
turno_curr = []
if (2**numero_turni<self.numeropartecipanti): # il turno preliminare e' necessario
numero_partite = numero_partite + 2 # si aggiungono i due turni preliminari
gap_giornate = self.totale_giornate()/numero_partite # ogni quante giornate di campionato se ne disputa una di coppa
numero_giornata_corrente = gap_giornate - 1 + (self.totale_giornate()% numero_partite) # voglio che la finale si disputi alla penultima giornata
giornata_coppa_1 = self.giornatalega_set.get(numero=numero_giornata_corrente) # giornata dell'andata
giornata_coppa_2 = self.giornatalega_set.get(numero=(numero_giornata_corrente+gap_giornate)) # giornata del ritorno
for m,(x,y) in enumerate(izip(allenatori, allenatori[(len(allenatori)+1)/2:])): #creo il turno preliminare sorteggiando a caso delle coppie
inc = IncontroCoppa.create(x,y, giornata_coppa_1, giornata_coppa_2, lega=self, tipo=nome_turni_coppa[-1], indice=(m+1)); # preliminare
inc.save()
numero_giornata_corrente = numero_giornata_corrente + 2*gap_giornate # incremento di due giornate, i turni preliminari gia' considerati
giornata_coppa_1 = self.giornatalega_set.get(numero=numero_giornata_corrente) # giornata dell'andata
giornata_coppa_2 = self.giornatalega_set.get(numero=(numero_giornata_corrente+gap_giornate)) # giornata del ritorno
for m in range(2**(turni_da_disputare-1)): #creo il primo turno eliminatorio
inc = IncontroCoppa.create(None,None, giornata_coppa_1, giornata_coppa_2, lega=self, tipo=nome_turni_coppa[turni_da_disputare-1], indice=(m+1)); # preliminare
turno_curr.append(inc)
inc.save()
else:
gap_giornate = self.totale_giornate()/numero_partite # ogni quante giornate di campionato se ne disputa una di coppa
numero_giornata_corrente = gap_giornate - 1 + (self.totale_giornate()% numero_partite) # voglio che la finale si disputi alla penultima giornata
giornata_coppa_1 = self.giornatalega_set.get(numero=numero_giornata_corrente) # giornata dell'andata
giornata_coppa_2 = self.giornatalega_set.get(numero=(numero_giornata_corrente+gap_giornate)) # giornata del ritorno
for m,(x,y) in izip(allenatori, allenatori[len(allenatori)/2+1:]): #creo il primo turno eliminatorio sorteggiando a caso delle coppie
inc = IncontroCoppa.create(x,y, giornata_coppa_1, giornata_coppa_2, lega=self, tipo=nome_turni_coppa[turni_da_disputare-1], indice=(m+1)); # preliminare
inc.save()
turno_curr.append(inc)
turni_da_disputare = turni_da_disputare - 1
print "turni da disp", turni_da_disputare
for turno in range(turni_da_disputare):
numero_giornata_corrente = numero_giornata_corrente + 2*gap_giornate # incremento di due giornate, che riguardano il turno precedente
print numero_giornata_corrente
giornata_coppa_1 = self.giornatalega_set.get(numero=numero_giornata_corrente) # giornata dell'andata
if(turno!=turni_da_disputare-1): # se non sono alla finale...
giornata_coppa_2 = self.giornatalega_set.get(numero=(numero_giornata_corrente+gap_giornate)) # giornata del ritorno
else:
giornata_coppa_2 = None
turno_prec = turno_curr
turno_curr = []
for m,(inc1,inc2) in enumerate(izip(turno_prec, turno_prec[(len(turno_prec)+1)/2:])): #creo il turno preliminare sorteggiando a caso delle coppie
inc = IncontroCoppa.create(None,None,giornata_coppa_1, giornata_coppa_2, incontrocasa=inc1, incontrotrasferta=inc2, lega=self,
tipo=nome_turni_coppa[turni_da_disputare-1-turno], indice=(m+1)); # turno eliminatorio
turno_curr.append(inc)
inc.save()
[incontro_finale] = turno_curr
incontro_finale.andata_e_ritorno = False
incontro_finale.save()
def __unicode__(self):
return self.nome
class LegaForm(ModelForm):
    """ModelForm used to create/edit a Lega (name, size, description, board)."""
    class Meta:
        fields = ['nome','numeropartecipanti', 'descrizione', 'redazione']
        labels = {
            'nome': _('Nome della lega'),
        }
        #help_texts = {
        #    'name': _('Some useful help text.'),
        #}
        error_messages = {
            'nome': {
                'max_length': _("Il nome della lega e' troppo lungo."),
            },
        }
        model = Lega
        widgets = {
            'nome': TextInput(attrs={'placeholder': 'Nome Lega'}),
            'descrizione': Textarea(attrs={'rows': '2'}),
        }
class Allenatore(models.Model):
    """A coach/fantasy-team entry of a given user inside one league."""
    utente = models.ForeignKey(settings.AUTH_USER_MODEL) # user this coach/team belongs to
    lega = models.ForeignKey(Lega)
    budget = models.PositiveSmallIntegerField(editable=False, validators=[MinValueValidator(0)])  # auction budget still available
    numeroportieri = models.PositiveIntegerField(editable=False, validators=[MinValueValidator(0)]) # goalkeepers still to buy
    numerodifensori = models.PositiveIntegerField(editable=False, validators=[MinValueValidator(0)])  # defenders still to buy
    numerocentrocampisti = models.PositiveIntegerField(editable=False, validators=[MinValueValidator(0)])  # midfielders still to buy
    numeroattaccanti = models.PositiveIntegerField(editable=False, validators=[MinValueValidator(0)])  # forwards still to buy
    nomesquadra = models.CharField(max_length=200)  # fantasy-team name
    amministratore = models.BooleanField(default=False) # True if this coach administers the league
    logourl = models.URLField(default='/static/fantaapp/images/savona.png')  # team logo URL
    editabili = ['nomesquadra', 'logourl']  # fields the coach is allowed to edit himself
    class Meta:
        unique_together = (("utente", "lega"),) # exactly one coach/team per (user, league) pair
    def resetta(self):
        # Reset budget and per-role purchase counters to the league defaults.
        self.numeroportieri = self.lega.numeroportieri
        self.numerodifensori = self.lega.numerodifensori
        self.numerocentrocampisti = self.lega.numerocentrocampisti
        self.numeroattaccanti = self.lega.numeroattaccanti
        self.budget = self.lega.budgetiniziale
    def save(self, *args, **kwargs):
        # Fill in a default team name, and seed counters/budget on first save.
        if (not(self.nomesquadra) or self.nomesquadra==""): # no team name yet: derive one from the user's alias
            self.nomesquadra="Squadra di " + self.utente.profile.alias
        if (not(self.id)): # being created right now
            self.resetta()
        super(Allenatore, self).save(*args, **kwargs)
    def numero_ruolo(self, ruolo):
        """Number of players still to buy for role code 'P', 'D', 'C' or 'A'."""
        return getattr(self, 'numero'+ ruoli_lunghi_plurali[ruolo].lower())
    def decresci_ruolo(self, ruolo):
        # Decrease by one the still-to-buy counter for `ruolo` (not saved here).
        currval = getattr(self, 'numero'+ ruoli_lunghi_plurali[ruolo].lower())
        setattr(self, 'numero'+ ruoli_lunghi_plurali[ruolo].lower(), currval - 1)
    def cresci_ruolo(self, ruolo):
        # Increase by one the still-to-buy counter for `ruolo` (not saved here).
        currval = getattr(self, 'numero'+ ruoli_lunghi_plurali[ruolo].lower())
        setattr(self, 'numero'+ ruoli_lunghi_plurali[ruolo].lower(), currval + 1)
    @property
    def nome(self):
        # Coach display name: the owning user's alias.
        return self.utente.profile.alias
    @property
    def totale_da_tesserare(self):
        # Total players still to buy, summed across all roles.
        tot = 0
        for r in ruoli_lunghi.keys():
            tot = tot + self.numero_ruolo(r)
        return tot
    def ottieni_rosa(self):
        """Replay this coach's transfers and return the current roster.

        Returns a dict keyed by role code; each value is a list of
        (calciatore, price) pairs. Purchases append; sales remove the player.
        NOTE(review): the bare `print` statements look like debug leftovers.
        """
        transf = self.trasferimentorosa_set.select_related('calciatore').all()
        rosa_dict = {}
        for r in ruoli_lunghi_plurali.keys():
            rosa_dict[r] = []
        for tr in transf:
            print tr
            ru = tr.calciatore.ruolo.get(redazione=self.lega.redazione).nome
            if tr.acquisto:
                print 'acquisto'
                rosa_dict[ru].append((tr.calciatore, tr.valore))
            else:
                print 'cessione'
                rosa_dict[ru] = [x for x in rosa_dict[ru] if x[0].id != tr.calciatore.id]
                print rosa_dict[ru]
        return rosa_dict
    def __unicode__(self):
        return self.nomesquadra
class Messaggio(models.Model):
    """A news/notification message for a league or for a single coach."""
    lega = models.ForeignKey(Lega) # league the message belongs to
    allenatore = models.ForeignKey(Allenatore, blank=True, null=True) # recipient; None means broadcast to the whole league
    testo = models.CharField(max_length=200) # message text
    data = models.DateTimeField(auto_now=True) # last-update timestamp
    @property
    def datatesto(self):
        # Timestamp-prefixed rendering of the message text.
        return self.data.strftime("%d-%m-%Y, %H:%M. ") + self.testo
    def __unicode__(self):
        return self.datatesto
class SquadraCampionato(models.Model):
    """A real team of a championship."""
    nome = models.CharField(max_length=50) # team name
    campionato = models.ForeignKey(Campionato) # championship it plays in
    def __unicode__(self):
        return self.nome.title()
class Giornata(models.Model):
    """A single match day of a real championship."""
    campionato = models.ForeignKey(Campionato)
    numero = models.PositiveSmallIntegerField() # match-day number within the championship: 1, 2, ...
    data = models.DateTimeField(auto_now_add=True) # approximate date of the match day
    disputata = models.BooleanField(default=False) # whether the match day has been played
    def aggiorna(self):
        """Refresh `disputata` and `data` from this day's real matches.

        A match day counts as played when every one of its matches is played,
        or when the next match day starts in less than 40 hours.
        """
        if (self.disputata): # already played: nothing to do
            return
        match_giornata = self.incontrocampionato_set # real matches of this match day
        if (match_giornata.exists()):
            disputata = False
            try: # look for the next match day; if it starts soon, consider this one finished anyway
                prox = self.campionato.giornata_set.get(numero=self.numero+1)
            except Giornata.DoesNotExist:
                prox = None
            if (prox):
                # NOTE(review): `utc` must be a tzinfo imported at the top of
                # the file (not visible here) — confirm.
                ore = (prox.data - datetime.datetime.now(utc))
                ore = ore.days*24 + ore.seconds//3600
                disputata = (ore < 40) # less than 40 hours to the next match day: consider this one over
            disputata = disputata or all(match_giornata.values_list('disputato', flat=True)) # or: every match already played
            data = min(match_giornata.values_list('data', flat=True)) # date of the earliest match
            self.disputata = disputata
            self.data = data
            self.save()
    def __unicode__(self):
        return "Giornata %d" % self.numero
class IncontroCampionato(models.Model):
    """A real match played between two championship teams."""
    data = models.DateTimeField(auto_now_add=True) # kick-off time of the match
    giornata = models.ForeignKey(Giornata) # match day this game belongs to
    squadracasa = models.ForeignKey(SquadraCampionato, related_name="IncontroCasa") # home team
    squadratrasferta = models.ForeignKey(SquadraCampionato, related_name="IncontroTransferta") # away team
    disputato = models.BooleanField(default=False) # whether the game has already been played
    golcasa = models.PositiveSmallIntegerField(default=0) # home-team goals
    goltrasferta = models.PositiveSmallIntegerField(default=0) # away-team goals
    def save(self, *args, **kwargs):
        # No extra behaviour: plain pass-through to Model.save (kept as an
        # extension point).
        super(IncontroCampionato, self).save(*args, **kwargs)
    def __unicode__(self):
        # "dd-mm-YYYY HH:MM\t Home-Away", plus the score once played.
        string = str(self.data.astimezone(timezone(settings.TIME_ZONE)).strftime('%d-%m-%Y %H:%M')) + ("\t %s-%s" % (self.squadracasa, self.squadratrasferta))
        if (self.disputato):
            string = string + (" %d-%d" % (self.golcasa, self.goltrasferta))
        return string
    @property
    def squadre(self):
        # Convenience (home, away) tuple.
        return (self.squadracasa, self.squadratrasferta)
class Calciatore(models.Model):
    """A player belonging to a real championship team."""
    nome = models.CharField(max_length=40)
    primavera = models.BooleanField(default=False)  # True for the synthetic "youth team" placeholder player
    squadra = models.ForeignKey(SquadraCampionato, blank=True, null=True)
    scorsoanno = models.ForeignKey('self', blank=True, null=True) # the Calciatore record of the same player for the previous season
    # The fields below are last-season statistics, shown during the auction.
    exsquadra = models.CharField(max_length=40)
    quotazione = models.PositiveSmallIntegerField(blank=True, null=True)
    fantamedia = models.FloatField(blank=True, null=True)
    fantamediasq = models.FloatField(blank=True, null=True)
    mediavoto = models.FloatField(blank=True, null=True)
    presenze = models.PositiveSmallIntegerField(blank=True, null=True)
    golfatti = models.PositiveSmallIntegerField(blank=True, null=True)
    golsubiti = models.PositiveSmallIntegerField(blank=True, null=True)
    rigoriparati = models.PositiveSmallIntegerField(blank=True, null=True)
    ammonizioni = models.PositiveSmallIntegerField(blank=True, null=True)
    espulsioni = models.PositiveSmallIntegerField(blank=True, null=True)
    assist = models.PositiveSmallIntegerField(blank=True, null=True)
    imageurl = models.URLField(blank=True, null=True) # URL of a picture of the player
    def save(self, *args, **kwargs):
        # "Primavera" placeholders get a canonical name derived from their
        # role and are detached from any real team.
        if self.primavera:
            ru = self.ruolo.first().nome
            self.nome = 'Primavera ' + ru
            self.exsquadra = ''
            self.squadra = None
        return super(Calciatore, self).save(*args, **kwargs)
    def __unicode__(self):
        return self.nome.title()
class Ruolo(models.Model):
    """The role ('P'/'D'/'C'/'A') that a given redazione assigns to a player."""
    calciatore = models.ForeignKey(Calciatore, related_name='ruolo')
    redazione = models.ForeignKey(Redazione)
    nome = models.CharField(max_length=5) # one-letter role code
    @property
    def nome_lungo(self):
        # Full Italian role name, e.g. 'P' -> 'Portiere'.
        return ruoli_lunghi[self.nome]
class Formazione(models.Model):
"""contiene la formazione inviata da un allenatore"""
giocatori = models.ManyToManyField(Calciatore, through='Referto')
allenatore = models.ForeignKey(Allenatore)
data_invio = models.DateTimeField() #data d'invio della formazione
definitiva = models.BooleanField(default=False) # se la formazione e' definitiva, perche' per esempio riguarda match gia' cominciati
giornata = models.ForeignKey(Giornata) # tutte le partite che corrispondono a questa giornata
modulo = models.CharField(max_length=5, default = '4,4,2', validators=[validate_comma_separated_integer_list]) # modulo tipo '4,4,2'
#modulo = models.CommaSeparatedIntegerField(max_length=5, default='4,4,2')
def save(self, *args, **kwargs):
self.data_invio = datetime.datetime.now(timezone(settings.TIME_ZONE))
return super(Formazione, self).save(*args, **kwargs)
def clona(self):
"""restituisce una copia dell'oggetto formazione, copiando anche i referti associati"""
formazione = self
referti = list(formazione.referto_set.all())
formazione.id = None
formazione.save()
for ref in referti:
ref.id = None
ref.formazione_id = formazione.id
ref.save()
return formazione
def fantavoti(self, referti=None, redazione=None):
"""restituisce (calcolata_adesso, lista_giocatori) ove: calcolata_adesso e' True se l'ha ricalcolata o False se non ha fatto nulla,
mentre lista_giocatori e' una lista di tuple (referto, ruolo) dei giocatori che sono scesi in campo. Nello stesso tempo
imposta i referti. Il primo parametro e' False se non sono state effettuate modifiche."""
if redazione is None:
redazione = self.allenatore.lega.redazione
if referti is None:
referti = self.referto_set.order_by('posizione').select_related('voto').prefetch_related('calciatore__ruolo__redazione').all()
if not(any([ref.da_ricalcolare for ref in referti])): # la formazione e' vuota o gia' aggiornata
lista = [ (ref, filter(lambda x: x.redazione==redazione, ref.calciatore.ruolo.all())[0]) for ref in referti if ref.entrato_in_campo ]
return (False, lista)
referti.update(entrato_in_campo=False, modificatore=False)
titolari = referti[0:11]
riserve = referti[11:]
riserve = filter(lambda x: x.ha_giocato, riserve) # filtro solo le riserve che hanno giocato
# prima metto i titolari che hanno giocato
lista_giocatori_totale = [ (tit, filter(lambda x: x.redazione==redazione,tit.calciatore.ruolo.all())[0]) for tit in titolari] # produco una lista di coppie (referto, ruolo), per i titolari
print >>sys.stderr, "lista giocatori totale"
print >>sys.stderr, lista_giocatori_totale
lista_giocatori = filter(lambda x: x[0].ha_giocato, lista_giocatori_totale) # filtro quelli che hanno preso un voto
print >>sys.stderr, "lista giocatori in campo"
print >>sys.stderr, lista_giocatori
riserve_ruolo = {}
schierati_in_ruolo = {}
giocano_in_ruolo = {}
for ru in ruoli_lunghi.keys():
riserve_ruolo[ru] = filter(lambda x: filter(lambda y: y.redazione==redazione, x.calciatore.ruolo.all())[0].nome==ru, riserve)
schierati_in_ruolo[ru] = len(filter(lambda x: x[1].nome == ru, lista_giocatori_totale)) # conto i difensori schierati in formazione
giocano_in_ruolo[ru] = len(filter(lambda x: x[1].nome == ru, lista_giocatori)) # conto i giocatori che giocano in ciascun ruolo
riserve = filter(lambda x: filter(lambda y: y.redazione==redazione, x.calciatore.ruolo.all())[0].nome!='P', riserve)
# escludo i portieri perche' non possono fare cambi ruolo...
lista_da_sostituire = filter(lambda x: x[0].ha_giocato is False, lista_giocatori_totale) # lista di quelli che non hanno giocato
for ref, ru in lista_da_sostituire[0:3]: # posso fare al piu' tre cambi
print "cerco un sostituto per",ref.calciatore.nome
if (giocano_in_ruolo['A']>=3): # ci sono gia' 3 attaccanti, non possono piu' entrare, quindi svuoto le liste di attaccanti
riserve = filter(lambda x: filter(lambda y: y.redazione==redazione, x.calciatore.ruolo.all())[0].nome!='A', riserve)
riserve_ruolo['A'] = []
if (riserve_ruolo[ru.nome]): # c'e' almeno una riserva di questo ruolo
ris = riserve_ruolo[ru.nome].pop(0)
print "ho trovato", ris.calciatore.nome
if (ru.nome!='P'):
riserve.remove(ris)
lista_giocatori.append((ris, ru))
giocano_in_ruolo[ru.nome] += 1 # incremento di 1 quelli che giocano in questo ruolo
continue
print "Cerco un cambio ruolo"
#non sono riuscito a sostituirlo con uno dello stesso ruolo, cerco un cambio ruolo
if (ru == 'P'): # i portieri non fanno cambi ruolo
continue
if (ru == 'D' and giocano_in_ruolo[ru.nome] < 3): # se non ci sono almeno 3 difensori gia' in campo, non posso fare un cambio ruolo di difensori...
continue
if (riserve): # c'e' almeno una riserva disponibile, cerco un cambio ruolo
ris = riserve.pop(0)
print "Ho trovato", ris.calciatore.nome
ru_ris = filter(lambda x: x.redazione==redazione, ris.calciatore.ruolo.all())[0]
giocano_in_ruolo[ru_ris.nome] += 1 # incremento di 1 quelli che giocano in questo ruolo
riserve_ruolo[ru_ris.nome].remove(ris)
lista_giocatori.append((ris, ru_ris))
ref_entrati = [ref.id for (ref, ru) in lista_giocatori]
print >>sys.stderr, "ref entrati"
print >>sys.stderr, ref_entrati
ref_entrati = Referto.objects.filter(pk__in=ref_entrati).update(entrato_in_campo=True)
port_o_dif_in_campo = [ref.id for (ref, ru) in lista_giocatori if ru.nome=='D' or ru.nome=='P']
print >>sys.stderr, 'schierati in difesa'
print >>sys.stderr, schierati_in_ruolo['D']
print >>sys.stderr, 'port o dif'
print >>sys.stderr, port_o_dif_in_campo
if ( schierati_in_ruolo['D']>3 and len(port_o_dif_in_campo) >= 5): # se ha schierati piu' di 3 difensori ed hanno giocato effettivamente piu' di 3, metto il flag modificatore
Referto.objects.filter(pk__in=port_o_dif_in_campo).update(modificatore=True)
for (ref, ru) in lista_giocatori:
if(ref.id in port_o_dif_in_campo):
ref.modificatore = True # aggiorno il modificatore sulla copia locale
referti.update(da_ricalcolare=False)
return (True, lista_giocatori)
class GiornataLega(models.Model):
    """A league match day, mapped onto one real championship match day."""
    giornata = models.ForeignKey(Giornata, blank=True, null=True)
    lega = models.ForeignKey(Lega)
    numero = models.PositiveSmallIntegerField() # match-day number within the league
    def chiudi_giornata(self):
        """Close this match day for the cup.

        If a cup return leg was played on this match day, fill the next
        round's ties with the advancing coaches. After a preliminary round
        the qualifiers are picked by aggregate goal difference (ties broken
        by summed fantasy averages); after any later round each tie is fed
        directly by its two feeder ties.
        """
        incontricoppa = IncontroCoppa.objects.filter(incontro_ritorno__giornatalega=self).select_related('incontro_andata','incontro_ritorno') # cup ties whose return leg was played on this match day
        if incontricoppa.count() == 0:
            return # no cup return leg here: nothing to do
        turnoattuale = incontricoppa[0].tipo
        indiceturno = nome_turni_coppa.index(turnoattuale)
        if indiceturno==0: # it's the final: nothing to feed (cup-winner proclamation could go here)
            return
        nuovo_turno = self.lega.incontrocoppa_set.filter(tipo=nome_turni_coppa[indiceturno-1])
        # BUG FIX: `plnum` was used below but never assigned — its computation
        # had been commented out, causing a NameError at runtime. Restore it:
        # the number of coaches advancing is two per next-round tie.
        plnum = 2*nuovo_turno.count()
        if (incontricoppa[0].tipo == nome_turni_coppa[-1]): # it's the return leg of the preliminary round
            scarto_alle = {}
            media_alle = {}
            for inc in incontricoppa:
                andata = inc.incontro_andata
                ritorno = inc.incontro_ritorno
                # Aggregate goal difference over the two legs, per coach.
                scarto_alle[andata.allenatorecasa.id] = andata.golcasa - andata.goltrasferta + ritorno.goltrasferta - ritorno.golcasa
                scarto_alle[andata.allenatoretrasferta.id] = -(andata.golcasa - andata.goltrasferta + ritorno.goltrasferta - ritorno.golcasa)
                # Tie-breaker: summed fantasy averages over the two legs.
                media_alle[andata.allenatorecasa.id] = andata.fmcasa + ritorno.fmtrasferta
                media_alle[andata.allenatoretrasferta.id] = andata.fmtrasferta + ritorno.fmcasa
            alle_id = sorted(scarto_alle.keys(), key=lambda x: (scarto_alle[x], media_alle[x])) # seeding order: index 0 worst, -1 best
            alle_selected = alle_id[-plnum:] # only the best plnum coaches advance
            teste_di_serie = alle_selected[-plnum/2:][::-1] # seeded half, best first
            sfidanti = alle_selected[:plnum/2] # challenger half
            # NOTE(review): setta_allenatori is fed ids here, not Allenatore
            # instances — confirm against its definition (not visible here).
            for (forte,scarso,inccoppa) in zip(teste_di_serie, sfidanti, nuovo_turno.all()):
                inccoppa.setta_allenatori(scarso,forte)
        else: # not a preliminary round: each next-round tie is fed by its two feeder ties
            for inc in nuovo_turno:
                inc.setta_allenatori_da_incontri()
class IncontroLega(models.Model):
    """A match played between two fantasy teams within a league.

    Also reused as a single leg of a cup tie: in that case ``lega`` is NULL
    and the leg is reached through the ``IncontroCoppaAnd`` /
    ``IncontroCoppaRit`` reverse relations.
    """
    allenatorecasa = models.ForeignKey(Allenatore, blank=True, null=True, related_name="IncontroCasa")  # home coach (NULL while the pairing is undecided)
    allenatoretrasferta = models.ForeignKey(Allenatore, blank=True, null=True, related_name="IncontroTrasferta")  # away coach
    formazionecasa = models.OneToOneField(Formazione, blank=True, null=True, related_name="IncontroCasa")  # home line-up
    formazionetrasferta = models.OneToOneField(Formazione, blank=True, null=True, related_name="IncontroTrasferta")  # away line-up
    lega = models.ForeignKey(Lega, blank=True, null=True)  # league this match belongs to (NULL for cup legs)
    giornatalega = models.ForeignKey(GiornataLega)  # match-day within the league
    fmcasa_nomod = models.DecimalField(default=0.0, max_digits=5, decimal_places = 2)  # home fantasy total, before modifiers
    fmtrasferta_nomod = models.DecimalField(default=0.0, max_digits=5, decimal_places = 2)  # away fantasy total, before modifiers
    modcasa = models.DecimalField(default=0.0, max_digits=5, decimal_places = 2)  # home (defence) modifier
    modtrasferta = models.DecimalField(default=0.0, max_digits=5, decimal_places = 2)  # away (defence) modifier
    #disputato = models.BooleanField(default=False) # whether the match has already been played

    def __unicode__(self):
        """'Home - Away' label; placeholder text while the pairing is undecided."""
        if self.allenatorecasa is not None and self.allenatoretrasferta is not None:
            return self.allenatorecasa.__unicode__() + " - " + self.allenatoretrasferta.__unicode__()
        else:
            return "da definire - da definire"

    def short(self):
        """Compact label made of 3-letter abbreviations of both team names."""
        return self.allenatorecasa.__unicode__().replace(" ", "").replace(".","")[0:3] + " - " + self.allenatoretrasferta.__unicode__().replace(" ","").replace(".","")[0:3]

    @property
    def disputato(self):
        """True once both sides have a non-zero fantasy total, i.e. the match was played."""
        if (self.fmcasa==0.0 or self.fmtrasferta==0.0):
            return False
        return True

    def aggiorna_incontro(self, refertocasa=None, refertotrasferta=None, redazione=None, aggiorna_comunque=False):
        """Recompute fantasy totals and defence modifiers for both line-ups.

        :param refertocasa: optional pre-fetched referti for the home line-up
        :param refertotrasferta: optional pre-fetched referti for the away line-up
        :param redazione: ignored — immediately overwritten with the league's redazione below
        :param aggiorna_comunque: force recomputation even if no vote changed
        :returns: tuple ``(da_aggiornare, liste_giocatori)`` with the force flag
            and, per side, the list of (Referto, Ruolo) pairs that went to vote
        """
        formazioni = [self.formazionecasa, self.formazionetrasferta]
        referti_formazioni = [refertocasa, refertotrasferta]
        # Resolve the redazione from the league; cup legs reach it through the owning tie.
        redazione = self.lega.redazione if self.lega is not None else ( self.IncontroCoppaAnd.lega.redazione if hasattr(self, 'IncontroCoppaAnd') else self.IncontroCoppaRit.lega.redazione)
        modificatori = [0.0,0.0]
        fm = [0.0,0.0]
        da_aggiornare = aggiorna_comunque
        calcolata_adesso = [False, False]
        liste_giocatori = [None, None]
        for ind,formazione in enumerate(formazioni):
            if(formazione):
                (calcolata_adesso[ind], liste_giocatori[ind]) = formazione.fantavoti(referti_formazioni[ind], redazione)
                if da_aggiornare or calcolata_adesso[ind]:
                    conta_difensori = 0  # how many defenders seen so far
                    lista_difensori = []  # pure votes of all fielded defenders
                    for (ref, ru) in liste_giocatori[ind]:
                        fm[ind] = fm[ind] + ref.fantavoto
                        if(ref.modificatore):
                            modificatori[ind] = modificatori[ind] + ref.votopuro
                        if ru.nome=='D':
                            conta_difensori = conta_difensori+1
                            lista_difensori.append(ref.votopuro)
                    lista_difensori.sort()
                    lista_difensori.reverse()
                    min_difensori = sum(lista_difensori[3:])  # total of defenders beyond the best three
                    conta_difensori = 4  # average over the best 3 defenders plus the goalkeeper
                    modificatori[ind] = 0 if conta_difensori == 0 else (modificatori[ind]-min_difensori)/conta_difensori  # average excluding the weakest defenders (includes the keeper)
                    # Map the average onto the modifier scale: 3 points per half-vote above 6.0.
                    modificatori[ind] = math.floor((modificatori[ind]-6.0)/0.5)*3
                    modificatori[ind] = 1.0 if modificatori[ind]==0 else max(modificatori[ind],0)  # modifier is 1 when the average sits in the base band
                    if ind == 0:
                        self.modcasa = modificatori[ind]
                        self.fmcasa_nomod = fm[ind]
                    else:
                        self.modtrasferta = modificatori[ind]
                        self.fmtrasferta_nomod = fm[ind]
        if any(calcolata_adesso) or da_aggiornare:
            self.save()
        return (da_aggiornare, liste_giocatori)

    @property
    def fmcasa(self):
        # Opponent's modifier is subtracted; +1.0 presumably the home-field bonus — TODO confirm.
        return float(self.fmcasa_nomod) - float(self.modtrasferta) + 1.0

    @property
    def fmtrasferta(self):
        return float(self.fmtrasferta_nomod) - float(self.modcasa)

    @property
    def golcasa(self):
        """Goals scored by the home side: one goal at 66.0, one more every 6 points."""
        return int(max(0, math.floor((float(self.fmcasa) - 66.0)/6.0)+1))

    @property
    def goltrasferta(self):
        """Goals scored by the away side, same scale as ``golcasa``."""
        return int(max(0, math.floor((float(self.fmtrasferta) - 66.0)/6.0)+1))
class IncontroCoppa(models.Model):
    """A cup tie with first and second leg.

    If the legs already have their coaches set, the tie behaves like a league
    match; otherwise ``incontrocasa``/``incontrotrasferta`` point at the
    feeding ties whose winners will play it once those are decided.
    """
    incontrocasa = models.ForeignKey("self", related_name="QualificataCasaPer", blank=True, null=True)  # tie feeding the home slot
    incontrotrasferta = models.ForeignKey("self", related_name="QualificataTrasfertaPer", blank=True, null=True)  # tie feeding the away slot
    incontro_andata = models.OneToOneField(IncontroLega, blank=True, null=True, related_name="IncontroCoppaAnd")  # first-leg match
    incontro_ritorno = models.OneToOneField(IncontroLega, blank=True, null=True, related_name="IncontroCoppaRit")  # second-leg match (NULL for one-leg rounds)
    lega = models.ForeignKey(Lega)  # league whose cup this tie belongs to
    tipo = models.CharField(max_length=20, default="Turno Preliminare")  # round name: preliminary, quarter-final, ...
    andata_ritorno = models.BooleanField(default=True)  # True if the round is two-legged; if False the return legs are NULL
    indice = models.PositiveSmallIntegerField(default=0)  # tie index within the round, e.g. 1st quarter, 2nd quarter

    @classmethod
    def create(cls, allenatorecasa, allenatoretrasferta, giornata_andata, giornata_ritorno, **kwargs):
        """Build a tie and its legs.

        The legs are saved immediately; the tie itself is returned UNSAVED —
        the caller is responsible for saving it.
        """
        inclega_andata = IncontroLega(allenatorecasa=allenatorecasa, allenatoretrasferta=allenatoretrasferta, giornatalega=giornata_andata)
        inclega_andata.save()
        inclega_ritorno = None
        if giornata_ritorno is not None:
            # Return leg with home/away swapped.
            inclega_ritorno = IncontroLega(allenatorecasa=allenatoretrasferta, allenatoretrasferta=allenatorecasa, giornatalega=giornata_ritorno)
            inclega_ritorno.save()
        inc = cls(incontro_andata = inclega_andata, incontro_ritorno=inclega_ritorno, **kwargs); # preliminary
        return inc

    class Meta:
        unique_together = (("lega", "indice", "tipo"),)  # one tie per (league, round, index)

    @property
    def allenatorecasanome(self):
        """Display name of the home side, or a placeholder until it is decided."""
        if (self.incontro_andata is not None and self.incontro_andata.allenatorecasa is not None):
            return self.incontro_andata.allenatorecasa.__unicode__()
        elif (self.incontrocasa is not None):
            return ("Vincitore %s %d" % (self.incontrocasa.tipo, self.incontrocasa.indice))
        return "da stabilire"

    @property
    def allenatoretrasfertanome(self):
        """Display name of the away side, or a placeholder until it is decided."""
        if (self.incontro_andata is not None and self.incontro_andata.allenatoretrasferta is not None):
            return self.incontro_andata.allenatoretrasferta.__unicode__()
        elif (self.incontrotrasferta is not None):
            return ("Vincitore %s %d" % (self.incontrotrasferta.tipo, self.incontrotrasferta.indice))
        return "da stabilire"

    def setta_allenatori(self, allcasa_id, alltrasferta_id):
        """Assign the coaches: ``allcasa_id`` hosts the first leg; the return leg is mirrored."""
        self.incontro_andata.allenatorecasa_id = allcasa_id
        self.incontro_andata.allenatoretrasferta_id = alltrasferta_id
        self.incontro_andata.save()
        if(self.incontro_ritorno):
            self.incontro_ritorno.allenatorecasa_id = alltrasferta_id
            self.incontro_ritorno.allenatoretrasferta_id = allcasa_id
            self.incontro_ritorno.save()

    def setta_allenatori_da_incontri(self):
        """Fill the pairing with the winners of the two feeding ties (they must be decided)."""
        vincitore_casa = self.incontrocasa.vincitore.id
        vincitore_trasferta = self.incontrotrasferta.vincitore.id
        self.setta_allenatori(vincitore_casa, vincitore_trasferta)

    @property
    def vincitore(self):
        """Winner on aggregate goals over the two legs; ``None`` on a draw
        (penalty shoot-out is not implemented)."""
        scarto = self.incontro_andata.golcasa + self.incontro_ritorno.goltrasferta - self.incontro_andata.goltrasferta - self.incontro_ritorno.golcasa
        if scarto>0:
            return self.incontro_andata.allenatorecasa
        elif scarto<0:
            return self.incontro_andata.allenatoretrasferta
        else:  # aggregate draw: penalties would go here
            return None

    def __unicode__(self):
        return self.allenatorecasanome + " - " + self.allenatoretrasfertanome
@receiver(post_delete, sender=IncontroCoppa)
def my_handler(sender, instance, **kwargs):
    """Cascade-delete the two IncontroLega legs when a cup tie is removed."""
    for leg in (instance.incontro_andata, instance.incontro_ritorno):
        if leg is not None:
            leg.delete()
class Voto(models.Model):
    """Vote received by a player on a given match-day: the raw newspaper vote
    plus all bonus/malus events. Multiple rating sources are supported through
    the ``redazione`` field.
    """
    redazione = models.ForeignKey(Redazione)
    giornata = models.ForeignKey(Giornata)
    calciatore = models.ForeignKey(Calciatore)
    votopuro = models.DecimalField(default=6.0, max_digits=4, decimal_places = 2)  # raw vote, before bonus/malus
    assist = models.PositiveSmallIntegerField(default=0)  # assists
    golsuazione = models.PositiveSmallIntegerField(default=0)  # goals from open play
    golsurigore = models.PositiveSmallIntegerField(default=0)  # penalty goals
    ammo = models.PositiveSmallIntegerField(default=0)  # yellow cards
    espu = models.PositiveSmallIntegerField(default=0)  # red cards
    autogol = models.PositiveSmallIntegerField(default=0)  # own goals
    golsubiti = models.PositiveSmallIntegerField(default=0)  # goals conceded
    rigorisbagliati = models.PositiveSmallIntegerField(default=0)  # penalties missed
    rigoriparati = models.PositiveSmallIntegerField(default=0)  # penalties saved
    goldellavittoria = models.PositiveSmallIntegerField(default=0)  # match-winning goals
    goldelpareggio = models.PositiveSmallIntegerField(default=0)  # equalising goals
    ha_giocato = models.BooleanField(default = False)  # whether the player actually took the field

    def aggiorna_referti(self):
        """Flag every Referto built on this vote so the owning line-ups get recomputed."""
        refs = Referto.objects.filter(calciatore=self.calciatore, formazione__giornata=self.giornata, formazione__allenatore__lega__redazione=self.redazione).update(da_ricalcolare=True)

    def save(self, *args, **kwargs):
        """Persist the vote, then invalidate all dependent referti."""
        super(Voto, self).save(*args, **kwargs)
        self.aggiorna_referti()
class Referto(models.Model):
    """Per-player line-up entry: links a fielded Calciatore to the Voto he
    received on that match-day and optionally caches manual vote overrides.

    BUG FIX: ``votopuro``/``fantavoto`` compared the manual override fields by
    truthiness, so a stored override of 0.00 was silently ignored and the
    linked Voto used instead. They now test ``is not None``, consistently with
    ``ha_giocato``.
    """
    formazione = models.ForeignKey(Formazione, on_delete=models.CASCADE)
    calciatore = models.ForeignKey(Calciatore, on_delete=models.CASCADE)
    posizione = models.PositiveSmallIntegerField()  # position on the pitch: 1-11 starters, higher values bench
    voto = models.ForeignKey(Voto, blank=True, null=True)
    votopuro_db = models.DecimalField(max_digits=4, decimal_places = 2, blank=True, null=True)  # manual raw-vote override
    entrato_in_campo = models.BooleanField(default=False)  # set by Formazione.fantavoti: player actually went to vote
    modificatore = models.BooleanField(default=False)  # True if the player contributed to a (defence) modifier
    fantavoto_db = models.DecimalField(max_digits=4, decimal_places = 2, blank=True, null=True)  # manual fantavoto override
    da_ricalcolare = models.BooleanField(default = True)  # set when votes change: owning line-up must be recomputed
    non_ha_giocato = models.BooleanField(default=False)  # force "did not play" regardless of the vote

    class Meta:
        ordering = ['posizione']

    def save(self, *args, **kwargs):
        """Persist the referto, keeping the linked Voto consistent.

        Youth-team ("primavera") players get a fixed per-role vote and no
        linked Voto; everyone else is (re)attached to the Voto of the same
        redazione/player/match-day, creating it when missing.
        """
        if self.calciatore.primavera:
            ruolo = self.calciatore.ruolo.all()[0]  # any redazione's role will do
            self.votopuro_db = votoprimavera[ruolo.nome]
            self.fantavoto_db = votoprimavera[ruolo.nome]
            self.voto = None
        elif (not(self.id) or (not(self.voto) and not(self.calciatore.primavera)) or self.voto.calciatore_id != self.calciatore_id):
            # No vote linked yet, or the linked vote belongs to another player: (re)attach it.
            redazione = self.formazione.allenatore.lega.redazione
            voto, created = Voto.objects.get_or_create(redazione=redazione, calciatore_id=self.calciatore_id, giornata=self.formazione.giornata)
            self.voto = voto
        super(Referto, self).save(*args, **kwargs)

    @property
    def votopuro(self):
        """Manual override if present, otherwise the raw vote of the linked Voto."""
        if self.votopuro_db is not None:
            return float(self.votopuro_db)
        return float(self.voto.votopuro)

    @property
    def fantavoto(self):
        """Manual override if present, otherwise the league's fantavoto formula applied to the vote."""
        if self.fantavoto_db is not None:
            return float(self.fantavoto_db)
        return getattr(fantafun, self.formazione.allenatore.lega.calcolo_voto)(self.voto)

    @property
    def ha_giocato(self):
        """Whether the player took the field: the manual flag wins, a manual
        fantavoto implies he played, otherwise defer to the linked Voto."""
        if (self.non_ha_giocato):
            return False
        if (self.fantavoto_db is not None):
            return True
        return self.voto.ha_giocato
class VotoForm(ModelForm):
    """Form to edit a single Voto: raw vote plus all bonus/malus counters."""
    class Meta:
        model = Voto
        fields = ['ha_giocato','votopuro', 'assist',
            'golsuazione', 'golsurigore', 'ammo', 'espu', 'autogol', 'golsubiti', 'rigorisbagliati', 'rigoriparati', 'goldellavittoria',
            'goldelpareggio']
        widgets = {'votopuro': NumberInput(attrs={'step': '0.5'})}  # votes move in half-point steps
        labels = {
            'votopuro': _('Voto puro'),
            'assist': _('Numero di assist'),
            'golsuazione': _('Gol su azione'),
            'golsurigore': _('Rigori segnati'),
            'ammo': _('Ammonizioni'),
            'espu': _('Espulsioni'),
            'autogol': _('Autogol'),
            'golsubiti': _('Gol subiti'),
            'rigorisbagliati': _('Rigori sbagliati'),
            'rigoriparati': _('Rigori parati'),
            'goldellavittoria': _('Gol decisivi per la vittoria'),
            'goldelpareggio': _('Gol decisivi per il pareggio'),
            'ha_giocato': _("E' entrato in campo?")
        }
class RefertoForm(ModelForm):
    """Form to override a single Referto: force "did not play" or set manual votes."""
    class Meta:
        model = Referto
        fields = ['non_ha_giocato', 'fantavoto_db', 'votopuro_db']
        labels = {'non_ha_giocato': _('Imponi SV'),
            'fantavoto_db': _('Fantavoto'),
            'votopuro_db': _('Voto Puro')}
        widgets = {'fantavoto_db': NumberInput(attrs={'step': 0.5}), 'votopuro_db': NumberInput(attrs={'step': 0.5})}  # half-point steps
class TrasferimentoRosa(models.Model):
    """Purchase/sale of a player by a coach.

    Saving (or deleting) a transfer also updates the coach's budget and the
    role-slot counters, so these operations must go through the model API
    rather than bulk updates.
    """
    from asta.models import Asta
    calciatore = models.ForeignKey(Calciatore)  # the player traded
    valore = models.PositiveSmallIntegerField()  # amount paid (purchase) or recovered (sale)
    acquisto = models.BooleanField(default=True)  # True = purchase, False = sale
    allenatore = models.ForeignKey(Allenatore)  # the coach making the transfer
    asta = models.ForeignKey(Asta, blank=True, null=True)  # auction the purchase comes from, if any

    def save(self, *args, **kwargs):
        """Persist the transfer and apply its effects to the coach.

        BUG FIX: removed a leftover ``print >>sys.stderr`` debug statement
        that polluted stderr on every save.
        """
        redazione = self.allenatore.lega.redazione
        ru = self.calciatore.ruolo.get(redazione=redazione).nome
        segno = 1 if self.acquisto else -1
        self.allenatore.budget = self.allenatore.budget - segno*self.valore
        if (self.acquisto):
            self.allenatore.decresci_ruolo(ru)  # one less free slot for this role
        else:
            self.allenatore.cresci_ruolo(ru)
        # Validate the coach (budget/slot constraints) before persisting.
        self.allenatore.full_clean(exclude=['logourl'])
        self.allenatore.save()
        super(TrasferimentoRosa, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Undo the transfer: revert the budget and restore the role slot."""
        redazione = self.allenatore.lega.redazione
        ru = self.calciatore.ruolo.get(redazione=redazione).nome
        segno = 1 if self.acquisto else -1
        self.allenatore.budget = self.allenatore.budget + self.valore*segno
        if (self.acquisto):
            self.allenatore.cresci_ruolo(ru)
        else:
            self.allenatore.decresci_ruolo(ru)
        self.allenatore.full_clean(exclude=['logourl'])
        self.allenatore.save()
        super(TrasferimentoRosa, self).delete(*args, **kwargs)

    def __unicode__(self):
        return "%s da %s a %d" % (self.calciatore.nome,self.allenatore.nome, self.valore)
class TrasferimentoRosaForm(ModelForm):
    """Form to record a transfer; the player choice is limited to the league's championship."""
    def __init__(self, *args, **kwargs):
        # ``lega`` is passed by the caller and only used to restrict the queryset.
        lega = kwargs.pop('lega')
        super(TrasferimentoRosaForm, self).__init__(*args, **kwargs)
        self.fields['calciatore'] = ModelChoiceField(queryset=Calciatore.objects.filter(squadra__campionato=lega.campionato).order_by('nome'))
    class Meta:
        fields = ['calciatore', 'valore', 'acquisto', 'allenatore']
        labels = {
            'calciatore': _("Calciatore coinvolto nell'acquisto"),
            'valore': _("Importo (spesa o crediti ottenuti"),
            'allenatore': _('Allenatore del trasferimento'),
        }
        model = TrasferimentoRosa
class ScambioForm(Form):
    """Form to swap two players between two coaches of the same league,
    optionally with a cash adjustment (``contropartita``)."""

    def __init__(self, *args, **kwargs):
        # ``lega`` restricts coaches to the league and players to its championship.
        lega = kwargs.pop('lega')
        super(ScambioForm, self).__init__(*args, **kwargs)
        self.fields['allenatore1'] = ModelChoiceField(queryset=lega.allenatore_set, error_messages={'required': 'Non puoi lasciare in bianco questo campo'})
        self.fields['allenatore1'].label = 'Allenatore 1'
        self.fields['calciatore1'] = ModelChoiceField(queryset=Calciatore.objects.filter(squadra__campionato=lega.campionato).order_by('nome'), error_messages={'required': 'Non puoi lasciare in bianco questo campo'})
        self.fields['calciatore1'].label = 'Calciatore 1'
        self.fields['allenatore2'] = ModelChoiceField(queryset=lega.allenatore_set, error_messages={'required': 'Non puoi lasciare in bianco questo campo'})
        self.fields['allenatore2'].label = 'Allenatore 2'
        self.fields['calciatore2'] = ModelChoiceField(queryset=Calciatore.objects.filter(squadra__campionato=lega.campionato).order_by('nome'), error_messages={'required': 'Non puoi lasciare in bianco questo campo'})
        self.fields['calciatore2'].label = 'Calciatore 2'
        # NOTE(review): the label says "da 1 a 2" but validators allow -1000..1000 — confirm the intended range.
        self.fields['contropartita'] = IntegerField(min_value=-1000, max_value=1000)
        self.fields['contropartita'].label = 'Contropartita (da 1 a 2)'
        self.fields['contropartita'].initial = 0

    def clean(self):
        """Validate that the swap is legal.

        Checks: the coaches differ, the two players share the same role, each
        coach owns his player, and whoever pays the adjustment can afford it.
        On success adds ``costo1``/``costo2`` (the players' current prices)
        to the cleaned data.
        """
        cleaned_data = super(ScambioForm, self).clean()
        if self._errors:  # field-level errors already present: skip cross checks
            return cleaned_data
        allenatore1 = cleaned_data['allenatore1']
        allenatore2 = cleaned_data['allenatore2']
        if allenatore1 == allenatore2:
            raise ValidationError(_('I due allenatori non possono essere uguali!'))
        rosa1 = allenatore1.ottieni_rosa()
        rosa2 = allenatore2.ottieni_rosa()
        contropartita = cleaned_data.get('contropartita',0)
        calciatore1 = None
        calciatore2 = None
        # Scan the rosters role by role, so the two players are implicitly
        # required to share the same role.
        for ruolo in rosa1.keys():
            lst1 = [x for x in rosa1[ruolo] if x[0] == cleaned_data['calciatore1']]
            lst2 = [x for x in rosa2[ruolo] if x[0] == cleaned_data['calciatore2']]
            if len(lst1)==1:  # player 1 found in coach 1's roster for this role
                if len(lst2)!=1:
                    raise ValidationError(_('I due giocatori non hanno lo stesso ruolo'))
                (calciatore1, costo1) = lst1[0]
                (calciatore2, costo2) = lst2[0]
                break
        if calciatore1 is None:
            raise ValidationError(_('L\'allenatore dei %(all)s non possiede %(cal)s'), params={'all': allenatore1, 'cal': cleaned_data['calciatore1']})
        if calciatore2 is None:
            raise ValidationError(_('L\'allenatore dei %(all)s non possiede %(cal)s'), params={'all': allenatore2, 'cal': cleaned_data['calciatore2']})
        # Whoever pays the adjustment must have the budget for it.
        if (contropartita>0 and allenatore1.budget < contropartita) or (contropartita<0 and allenatore2.budget < -contropartita):
            raise ValidationError(_('Crediti insufficienti all\'acquisto.'))
        cleaned_data['costo1'] = costo1
        cleaned_data['costo2'] = costo2
        return cleaned_data
|
[
"abenassen@users.noreply.github.com"
] |
abenassen@users.noreply.github.com
|
e8c097049a41c7f9bd4cff7d1c145fb15c0ccdff
|
3d457fa0163f68df31327dcd6a28e0f950adec9a
|
/vbmc4vsphere/vbmc.py
|
23100a37dd8f565e889145512606f58f275b5bd0
|
[
"Apache-2.0"
] |
permissive
|
aksoydoruk/virtualbmc-for-vsphere
|
ed63f49ce11f8b17c2f6e023e34f04c27d049c37
|
03cb2befb17d830fcd4d686a641e8aaa052f4117
|
refs/heads/main
| 2023-08-15T09:52:28.878208
| 2021-10-16T04:38:59
| 2021-10-16T04:38:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,237
|
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# import xml.etree.ElementTree as ET
import struct
import traceback
import pyghmi.ipmi.bmc as bmc
import pyghmi.ipmi.private.session as ipmisession
from pyghmi.ipmi.private.serversession import IpmiServer as ipmiserver
from pyghmi.ipmi.private.serversession import ServerSession as serversession
from vbmc4vsphere import exception, log, utils
LOG = log.get_logger()
# Power states returned by get_power_state() (IPMI chassis power semantics).
POWEROFF = 0
POWERON = 1

# From the IPMI - Intelligent Platform Management Interface Specification
# Second Generation v2.0 Document Revision 1.1 October 1, 2013
# https://www.intel.com/content/dam/www/public/us/en/documents/product-briefs/ipmi-second-gen-interface-spec-v2-rev1-1.pdf
#
# Command failed and can be retried
IPMI_COMMAND_NODE_BUSY = 0xC0
# Invalid data field in request
IPMI_INVALID_DATA = 0xCC

# Boot device maps.
# Values are the IPMI "boot device selector" field already shifted into its
# bit position within the boot-flags data byte.
GET_BOOT_DEVICES_MAP = {
    "ethernet": 0x4,
    "disk": 0x8,
    "cdrom": 0x14,
    "floppy": 0x3C,
}
# Maps ipmitool-style bootdev names to the device names used internally.
SET_BOOT_DEVICES_MAP = {
    "network": "ethernet",
    "hd": "disk",
    "optical": "cdrom",
    "floppy": "floppy",
}
def sessionless_data(self, data, sockaddr):
    """Examines unsolicited packet and decides appropriate action.

    For a listening IpmiServer, a packet without an active session
    comes here for examination. If it is something that is utterly
    sessionless (e.g. get channel authentication), send the appropriate
    response. If it is a get session challenge or open rmcp+ request,
    spawn a session to handle the context.

    Patched by VirtualBMC for vSphere to handle sessionless IPMIv2
    packet and ASF Presence Ping.

    Based on pyghmi 1.5.16, Apache License 2.0
    https://opendev.org/x/pyghmi/src/branch/master/pyghmi/ipmi/private/serversession.py
    """
    data = bytearray(data)
    if len(data) < 22:
        # Too short for an IPMI session packet; may still be an ASF ping.
        if data[0:4] == b"\x06\x00\xff\x06" and data[8] == 0x80:  # asf presence ping
            LOG.info("Responding to asf presence ping")
            self.send_asf_presence_pong(data, sockaddr)
            # No explicit return: the short packet fails the IPMI check below anyway.
        else:
            return
    if not (data[0] == 6 and data[2:4] == b"\xff\x07"):  # not ipmi
        return
    authtype = data[4]
    if authtype == 6:  # ipmi 2 payload...
        payloadtype = data[5]
        if payloadtype not in (0, 16):
            return
        if payloadtype == 16:  # new session to handle conversation
            serversession(
                self.authdata,
                self.kg,
                sockaddr,
                self.serversocket,
                data[16:],
                self.uuid,
                bmc=self,
            )
            return
        # ditch two byte, because ipmi2 header is two
        # bytes longer than ipmi1 (payload type added, payload length 2).
        data = data[2:]
    # Parse the IPMB header: requester address plus the netfn/lun byte.
    myaddr, netfnlun = struct.unpack("2B", bytes(data[14:16]))
    netfn = (netfnlun & 0b11111100) >> 2
    mylun = netfnlun & 0b11
    if netfn == 6:  # application request
        if data[19] == 0x38:  # cmd = get channel auth capabilities
            verchannel, level = struct.unpack("2B", bytes(data[20:22]))
            version = verchannel & 0b10000000
            if version != 0b10000000:
                # Only the IPMI 2.0+ request format is accepted.
                return
            channel = verchannel & 0b1111
            if channel != 0xE:
                # 0xE means "this channel"; anything else is ignored.
                return
            (clientaddr, clientlun) = struct.unpack("BB", bytes(data[17:19]))
            clientseq = clientlun >> 2
            clientlun &= 0b11  # Lun is only the least significant bits
            level &= 0b1111
            if authtype == 6:
                # IPMIv2-framed request gets an IPMIv2-framed response (vSphere needs this).
                self.send_auth_cap_v2(
                    myaddr, mylun, clientaddr, clientlun, clientseq, sockaddr
                )
            else:
                self.send_auth_cap(
                    myaddr, mylun, clientaddr, clientlun, clientseq, sockaddr
                )
        elif data[19] == 0x54:  # cmd = get channel cipher suites
            clientaddr, clientlun = data[17:19]
            clientseq = clientlun >> 2
            clientlun &= 0b11
            self.send_cipher_suites(
                myaddr, mylun, clientaddr, clientlun, clientseq, data, sockaddr
            )
def send_auth_cap_v2(self, myaddr, mylun, clientaddr, clientlun, clientseq, sockaddr):
    """Send response to "get channel auth cap (0x38)" command with IPMI 2.0 headers.

    Copied from send_auth_cap function and modified to send response
    in the form of IPMI 2.0.

    Based on pyghmi 1.5.16, Apache License 2.0
    https://opendev.org/x/pyghmi/src/branch/master/pyghmi/ipmi/private/serversession.py
    """
    # RMCP + IPMI 2.0 session header: authtype 6, payload type 0,
    # session id/sequence zero, payload length 0x0010.
    header = bytearray(
        b"\x06\x00\xff\x07\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00"
    )
    # netfn/lun byte: netfn 7 (application response) in the upper six bits.
    headerdata = [clientaddr, clientlun | (7 << 2)]
    headersum = ipmisession._checksum(*headerdata)
    header += bytearray(
        headerdata + [headersum, myaddr, mylun | (clientseq << 2), 0x38]
    )
    header += self.authcap  # capability bytes prepared by the IpmiServer
    # Trailing checksum over the body (from the responder address onwards).
    bodydata = struct.unpack("B" * len(header[19:]), bytes(header[19:]))
    header.append(ipmisession._checksum(*bodydata))
    ipmisession._io_sendto(self.serversocket, header, sockaddr)
def send_asf_presence_pong(self, data, sockaddr):
    """Answer an ASF Presence Ping with a Presence Pong.

    The pong echoes the ping's message tag (byte 9 of the request) and
    advertises IPMI support in the supported-entities byte.
    """
    pong = bytearray(b"\x06\x00\xff\x06\x00\x00\x11\xbe\x40")
    pong.append(data[9])  # echo the requester's message tag
    pong += b"\x00\x10\x00\x00\x11\xbe\x00\x00\x00\x00\x81\x00\x00\x00\x00\x00\x00\x00"
    ipmisession._io_sendto(self.serversocket, pong, sockaddr)
# Patch pyghmi's IpmiServer in place with the modified functions above, so the
# server handles sessionless IPMIv2 requests and ASF presence pings.
ipmiserver.sessionless_data = sessionless_data
ipmiserver.send_auth_cap_v2 = send_auth_cap_v2
ipmiserver.send_asf_presence_pong = send_asf_presence_pong
class VirtualBMC(bmc.Bmc):
def __init__(
    self,
    username,
    password,
    port,
    address,
    fakemac,
    vm_name,
    viserver,
    viserver_username=None,
    viserver_password=None,
    **kwargs
):
    """Create a virtual BMC bound to a single vSphere VM.

    :param username: IPMI user name accepted by this BMC
    :param password: IPMI password for ``username``
    :param port: UDP port the BMC listens on
    :param address: address the BMC listens on
    :param fakemac: fake MAC address reported via "get lan conf params"
    :param vm_name: name of the backing VM on the vSphere server
    :param viserver: vCenter/ESXi host to connect to
    :param viserver_username: vSphere login user
    :param viserver_password: vSphere login password
    """
    super(VirtualBMC, self).__init__(
        {username: password}, port=port, address=address
    )
    self.vm_name = vm_name
    self.fakemac = fakemac
    # Connection arguments reused by every utils.viserver_open() call.
    self._conn_args = {
        "vi": viserver,
        "vi_username": viserver_username,
        "vi_password": viserver_password,
    }
def get_boot_device(self):
    """Return the IPMI boot-device selector for the VM's first boot entry.

    Returns 0 when the first entry maps to no known device and
    IPMI_COMMAND_NODE_BUSY when the VM has no explicit boot order.
    Raises VirtualBMCError if talking to vSphere fails.
    """
    LOG.debug("Get boot device called for %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            vm = utils.get_viserver_vm(conn, self.vm_name)
            boot_element = vm.config.bootOptions.bootOrder
            boot_dev = None
            if boot_element:
                # Only the first entry of the boot order matters to IPMI.
                boot_dev = utils.get_bootable_device_type(conn, boot_element[0])
                LOG.debug("Boot device is: %s" % boot_dev)
                return GET_BOOT_DEVICES_MAP.get(boot_dev, 0)
            return IPMI_COMMAND_NODE_BUSY
    except Exception as e:
        msg = "Error getting boot device of vm %(vm)s. " "Error: %(error)s" % {
            "vm": self.vm_name,
            "error": e,
        }
        LOG.error(msg)
        raise exception.VirtualBMCError(message=msg)
def _remove_boot_elements(self, parent_element):
    """Strip every ``boot`` child element from *parent_element*."""
    for stale in list(parent_element.findall("boot")):
        parent_element.remove(stale)
def set_boot_device(self, bootdevice):
    """Set the VM's boot device from an IPMI bootdev name.

    :param bootdevice: ipmitool-style name ("network", "hd", "optical", "floppy")
    :returns: None on success, IPMI_INVALID_DATA for an unknown device,
        IPMI_COMMAND_NODE_BUSY on a vSphere error (client may retry)
    """
    LOG.debug(
        "Set boot device called for %(vm)s with boot " 'device "%(bootdev)s"',
        {"vm": self.vm_name, "bootdev": bootdevice},
    )
    device = SET_BOOT_DEVICES_MAP.get(bootdevice)
    if device is None:
        # Invalid data field in request
        return IPMI_INVALID_DATA
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            vm = utils.get_viserver_vm(conn, self.vm_name)
            utils.set_boot_device(conn, vm, device)
    except Exception as e:
        LOG.error(
            "Failed setting the boot device %(bootdev)s for vm %(vm)s."
            "Error: %(error)s",
            {"bootdev": device, "vm": self.vm_name, "error": e},
        )
        # Command failed, but let client to retry
        return IPMI_COMMAND_NODE_BUSY
def get_power_state(self):
    """Return POWERON/POWEROFF for the backing VM.

    Raises VirtualBMCError when the vSphere query fails.
    """
    LOG.debug("Get power state called for vm %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            target = utils.get_viserver_vm(conn, self.vm_name)
            if target.runtime.powerState == "poweredOn":
                return POWERON
    except Exception as e:
        msg = "Error getting the power state of vm %(vm)s. Error: %(error)s" % {
            "vm": self.vm_name,
            "error": e,
        }
        LOG.error(msg)
        raise exception.VirtualBMCError(message=msg)
    # Any state other than "poweredOn" is reported as off.
    return POWEROFF
def pulse_diag(self):
    """Trigger a diagnostic interrupt (NMI) on the VM.

    Returns IPMI_COMMAND_NODE_BUSY on failure so the IPMI client may retry.
    """
    LOG.debug("Power diag called for vm %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            target = utils.get_viserver_vm(conn, self.vm_name)
            utils.send_nmi(conn, target)
            LOG.debug(
                "The NMI will be sent to the vm %(vm)s 60 seconds later",
                {"vm": self.vm_name},
            )
    except Exception as e:
        LOG.error(
            "Error powering diag the vm %(vm)s. Error: %(error)s",
            {"vm": self.vm_name, "error": e},
        )
        # Command failed, but let client to retry
        return IPMI_COMMAND_NODE_BUSY
def power_off(self):
    """Hard power-off: force the VM off if it is currently running.

    Returns IPMI_COMMAND_NODE_BUSY on failure so the IPMI client may retry.
    """
    LOG.debug("Power off called for vm %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            target = utils.get_viserver_vm(conn, self.vm_name)
            if target.runtime.powerState == "poweredOn":
                target.PowerOff()
    except Exception as e:
        LOG.error(
            "Error powering off the vm %(vm)s. Error: %(error)s",
            {"vm": self.vm_name, "error": e},
        )
        # Command failed, but let client to retry
        return IPMI_COMMAND_NODE_BUSY
def power_on(self):
    """Power the VM on unless it is already running.

    Returns IPMI_COMMAND_NODE_BUSY on failure so the IPMI client may retry.
    """
    LOG.debug("Power on called for vm %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            target = utils.get_viserver_vm(conn, self.vm_name)
            if target.runtime.powerState != "poweredOn":
                target.PowerOn()
    except Exception as e:
        LOG.error(
            "Error powering on the vm %(vm)s. Error: %(error)s",
            {"vm": self.vm_name, "error": e},
        )
        # Command failed, but let client to retry
        return IPMI_COMMAND_NODE_BUSY
def power_shutdown(self):
    """Soft power-off: ask the guest OS to shut down if the VM is running.

    Returns IPMI_COMMAND_NODE_BUSY on failure so the IPMI client may retry.
    """
    LOG.debug("Soft power off called for vm %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            target = utils.get_viserver_vm(conn, self.vm_name)
            if target.runtime.powerState == "poweredOn":
                target.ShutdownGuest()
    except Exception as e:
        LOG.error(
            "Error soft powering off the vm %(vm)s. Error: %(error)s",
            {"vm": self.vm_name, "error": e},
        )
        # Command failed, but let client to retry
        return IPMI_COMMAND_NODE_BUSY
def power_reset(self):
    """Hard-reset the VM if it is powered on.

    Returns IPMI_COMMAND_NODE_BUSY on failure so the IPMI client may retry.
    FIX: corrected the typo "reseting" in the error log message.
    """
    LOG.debug("Power reset called for vm %(vm)s", {"vm": self.vm_name})
    try:
        with utils.viserver_open(**self._conn_args) as conn:
            vm = utils.get_viserver_vm(conn, self.vm_name)
            if "poweredOn" == vm.runtime.powerState:
                vm.Reset()
    except Exception as e:
        LOG.error(
            "Error resetting the vm %(vm)s. Error: %(error)s",
            {"vm": self.vm_name, "error": e},
        )
        # Command not supported in present state
        return IPMI_COMMAND_NODE_BUSY
def get_channel_access(self, request, session):
    """Fake response to "get channel access" command.

    Send dummy packet to response "get channel access" command.
    Just exists to be able to negotiate with vCenter Server.
    """
    data = [
        0b00100010,  # alerting disabled, auth enabled, always available
        0x04,  # privilege level limit = administrator
    ]
    session.send_ipmi_response(data=data)
def get_channel_info(self, request, session):
    """Fake response to "get channel info" command.

    Send dummy packet describing this channel as an 802.3 LAN channel.
    Just exists to be able to negotiate with vCenter Server.
    """
    data = [
        0x02,  # channel number = 2
        0x04,  # channel medium type = 802.3 LAN
        0x01,  # channel protocol type = IPMB-1.0
        0x80,  # session support = multi-session
        0xF2,  # vendor id = 7154 (0x001BF2, little-endian over three bytes)
        0x1B,  # vendor id = 7154
        0x00,  # vendor id = 7154
        0x00,  # reserved
        0x00,  # reserved
    ]
    session.send_ipmi_response(data=data)
def get_lan_configuration_parameters(self, request, session):
    """Fake response to "get lan conf params" command.

    Only parameter 5 (MAC address) is implemented: it answers with the fake
    MAC configured for this BMC. Any other parameter is answered with
    completion code 0x80 ("parameter not supported").

    FIX: removed a leftover cryptic debug log (``LOG.info("ne: %s" % data)``)
    and switched the remaining log call to lazy %-style arguments.
    """
    data = [0]  # the first byte is revision, force to 0 as a dummy
    req_param = request["data"][1]
    LOG.info("Requested parameter = %s", req_param)
    if req_param == 5:  # mac address
        data.extend(utils.convert_fakemac_string_to_bytes(self.fakemac))
    if len(data) > 1:
        session.send_ipmi_response(data=data)
    else:
        # Parameter not supported
        session.send_ipmi_response(data=data, code=0x80)
def handle_raw_request(self, request, session):
    """Call the appropriate function depending on the received command.
    Based on pyghmi 1.5.16, Apache License 2.0
    https://opendev.org/x/pyghmi/src/branch/master/pyghmi/ipmi/bmc.py
    """
    # Reference table of IPMI netfn/command pairs; only a subset is
    # dispatched below, the rest answer with 0xC1 (invalid command).
    # | FNC:CMD   | NetFunc         | Command                             |
    # | --------- | ----------------|------------------------------------ |
    # | 0x00:0x00 | Chassis         | Chassis Capabilities                |
    # | 0x00:0x01 | Chassis         | Get Chassis Status                  |
    # | 0x00:0x02 | Chassis         | Chassis Control                     |
    # | 0x00:0x08 | Chassis         | Set System Boot Options             |
    # | 0x00:0x09 | Chassis         | Get System Boot Options             |
    # | 0x04:0x2D | Sensor/Event    | Get Sensor Reading                  |
    # | 0x04:0x2F | Sensor/Event    | Get Sensor Type                     |
    # | 0x04:0x30 | Sensor/Event    | Set Sensor Reading and Event Status |
    # | 0x06:0x01 | App             | Get Device ID                       |
    # | 0x06:0x02 | App             | Cold Reset                          |
    # | 0x06:0x03 | App             | Warm Reset                          |
    # | 0x06:0x04 | App             | Get Self Test Results               |
    # | 0x06:0x08 | App             | Get Device GUID                     |
    # | 0x06:0x22 | App             | Reset Watchdog Timer                |
    # | 0x06:0x24 | App             | Set Watchdog Timer                  |
    # | 0x06:0x2E | App             | Set BMC Global Enables              |
    # | 0x06:0x31 | App             | Get Message Flags                   |
    # | 0x06:0x35 | App             | Read Event Message Buffer           |
    # | 0x06:0x36 | App             | Get BT Interface Capabilities       |
    # | 0x06:0x40 | App             | Set Channel Access                  |
    # | 0x06:0x41 | App             | Get Channel Access                  |
    # | 0x06:0x42 | App             | Get Channel Info Command            |
    # | 0x0A:0x10 | Storage         | Get FRU Inventory Area Info         |
    # | 0x0A:0x11 | Storage         | Read FRU Data                       |
    # | 0x0A:0x12 | Storage         | Write FRU Data                      |
    # | 0x0A:0x40 | Storage         | Get SEL Info                        |
    # | 0x0A:0x42 | Storage         | Reserve SEL                         |
    # | 0x0A:0x44 | Storage         | Add SEL Entry                       |
    # | 0x0A:0x48 | Storage         | Get SEL Time                        |
    # | 0x0A:0x49 | Storage         | Set SEL Time                        |
    # | 0x0C:0x01 | Transport       | Set LAN Configuration Parameters    |
    # | 0x0C:0x02 | Transport       | Get LAN Configuration Parameters    |
    # | 0x2C:0x00 | Group Extension | Group Extension Command             |
    # | 0x2C:0x03 | Group Extension | Get Power Limit                     |
    # | 0x2C:0x04 | Group Extension | Set Power Limit                     |
    # | 0x2C:0x05 | Group Extension | Activate/Deactivate Power Limit     |
    # | 0x2C:0x06 | Group Extension | Get Asset Tag                       |
    # | 0x2C:0x08 | Group Extension | Set Asset Tag                       |
    LOG.info(
        "Received netfn = 0x%x (%d), command = 0x%x (%d), data = %s"
        % (
            request["netfn"],
            request["netfn"],
            request["command"],
            request["command"],
            request["data"].hex(),
        )
    )
    try:
        if request["netfn"] == 6:  # App netfn
            if request["command"] == 1:  # get device id
                return self.send_device_id(session)
            elif request["command"] == 2:  # cold reset
                return session.send_ipmi_response(code=self.cold_reset())
            elif request["command"] == 0x41:  # get channel access
                return self.get_channel_access(request, session)
            elif request["command"] == 0x42:  # get channel info
                return self.get_channel_info(request, session)
            elif request["command"] == 0x48:  # activate payload
                return self.activate_payload(request, session)
            elif request["command"] == 0x49:  # deactivate payload
                return self.deactivate_payload(request, session)
        elif request["netfn"] == 0:  # Chassis netfn
            if request["command"] == 1:  # get chassis status
                return self.get_chassis_status(session)
            elif request["command"] == 2:  # chassis control
                return self.control_chassis(request, session)
            elif request["command"] == 8:  # set boot options
                return self.set_system_boot_options(request, session)
            elif request["command"] == 9:  # get boot options
                return self.get_system_boot_options(request, session)
        elif request["netfn"] == 12:  # Transport netfn
            if request["command"] == 2:  # get lan configuration parameters
                return self.get_lan_configuration_parameters(request, session)
        # Anything not dispatched above: 0xC1 = invalid/unsupported command.
        session.send_ipmi_response(code=0xC1)
    except NotImplementedError:
        session.send_ipmi_response(code=0xC1)
    except Exception:
        # NOTE(review): broad catch-all replies via the private
        # _send_ipmi_net_payload API (pyghmi internal) and prints the
        # traceback to stderr -- confirm this is the intended error path.
        session._send_ipmi_net_payload(code=0xFF)
        traceback.print_exc()
|
[
"2920259+kurokobo@users.noreply.github.com"
] |
2920259+kurokobo@users.noreply.github.com
|
fa1a2a2da4ce8b7c223ecec643abba112a7b6263
|
bb10451047bf3d8e6b5c37a0bbf2a73729340260
|
/preprocessing/tracking.py
|
5e7c829ed110b26db2fd923b1821afa467ad4fc8
|
[] |
no_license
|
jocelynqiaoqian/Computer_Vision_Project
|
cc9724d9c49ad5c3cfc8fcaa0b6a560d3c246852
|
3899690107d2f6b52220e731379152ce60a4b76d
|
refs/heads/master
| 2021-05-04T14:03:28.979975
| 2017-11-21T02:14:56
| 2017-11-21T02:14:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,728
|
py
|
import cv2
import sys

# OpenCV version components; the tracker factory API changed in 3.3.
(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')

if __name__ == '__main__':

    # Set up tracker.
    # Instead of MIL, you can also use any of the other listed types.
    tracker_types = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN']
    tracker_type = tracker_types[4]

    if int(minor_ver) < 3:
        tracker = cv2.Tracker_create(tracker_type)
    else:
        # elif chain: exactly one constructor matches the selected type.
        if tracker_type == 'BOOSTING':
            tracker = cv2.TrackerBoosting_create()
        elif tracker_type == 'MIL':
            tracker = cv2.TrackerMIL_create()
        elif tracker_type == 'KCF':
            tracker = cv2.TrackerKCF_create()
        elif tracker_type == 'TLD':
            tracker = cv2.TrackerTLD_create()
        elif tracker_type == 'MEDIANFLOW':
            tracker = cv2.TrackerMedianFlow_create()
        elif tracker_type == 'GOTURN':
            tracker = cv2.TrackerGOTURN_create()

    # Read video
    video = cv2.VideoCapture("../results/output_CandyEdge.m4v")

    # Exit if video not opened.
    # (print statements converted to print() calls: the old Python 2 form
    # is a SyntaxError on Python 3; the call form works on both.)
    if not video.isOpened():
        print("Could not open video")
        sys.exit()

    # Read first frame.
    ok, frame = video.read()
    if not ok:
        print('Cannot read video file')
        sys.exit()

    # Define an initial bounding box
    bbox = (287, 23, 86, 320)

    # Uncomment the line below to select a different bounding box
    bbox = cv2.selectROI(frame, False)

    # Initialize tracker with first frame and bounding box
    ok = tracker.init(frame, bbox)

    while True:
        # Read a new frame
        ok, frame = video.read()
        if not ok:
            break

        # Start timer
        timer = cv2.getTickCount()

        # Update tracker
        ok, bbox = tracker.update(frame)

        # Calculate Frames per second (FPS)
        fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer)

        # Draw bounding box
        if ok:
            # Tracking success
            p1 = (int(bbox[0]), int(bbox[1]))
            p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
            cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
        else:
            # Tracking failure
            cv2.putText(frame, "Tracking failure detected", (100, 80),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)

        # Display tracker type on frame
        cv2.putText(frame, tracker_type + " Tracker", (100, 20),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

        # Display FPS on frame
        cv2.putText(frame, "FPS : " + str(int(fps)), (100, 50),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

        # Display result
        cv2.imshow("Tracking", frame)

        # Exit if ESC pressed
        k = cv2.waitKey(1) & 0xff
        if k == 27:
            break
|
[
"jf773@cornell.edu"
] |
jf773@cornell.edu
|
fbe792816c6f306d39a1d6f860a9c02c7704b0a8
|
2c54a93bb144871a821ccc87f67f0b98a9140a5b
|
/.metadata/.plugins/org.eclipse.core.resources/.history/c7/e01b29f4b39900171d01ac63d6c54cb2
|
689f146401abceab6d37c8d3f12859cf3e418bad
|
[] |
no_license
|
hashem65/SwarmInteligenceOpt
|
02ea0ad1cee4e422358f0fffec22cf40aeab940c
|
091eb75a1003bb7c6b349c5e9a2b66f3c65ab893
|
refs/heads/master
| 2021-09-09T10:27:51.311955
| 2018-03-15T06:43:25
| 2018-03-15T06:43:25
| 104,139,579
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,127
|
#!/usr/bin/env python
from numpy import array
from random import random
from math import sin, sqrt
# PSO hyper-parameters.
iter_max = 10000    # maximum number of optimisation iterations
pop_size = 100      # number of particles in the swarm
dimensions = 20     # dimensionality of the search space
c1 = 2              # cognitive acceleration coefficient
c2 = 2              # social acceleration coefficient
err_crit = 0.00001  # stop once the reported error drops below this
class Particle:
    # Plain attribute bag: params, fitness, v and best are attached
    # dynamically by the initialisation and optimisation loops below.
    pass
def f6(param):
    """Schaffer-style F6 benchmark over the first 20 entries of *param*.

    Returns a ``(fitness, error)`` pair with ``error == 1 - fitness``;
    the optimum fitness is 1.0.
    """
    # The original also computed ``para = param * 20`` first, which was
    # immediately overwritten -- dead code, removed.
    para = param[0:20]
    num = (sin(sqrt((para[0] * para[0]) + (para[9] * para[1])))) * \
          (sin(sqrt((para[2] * para[1]) + (para[8] * para[4])))) - 0.5 + \
          (sin(sqrt((para[3] * para[3]) + (para[6] * para[2])))) * \
          (sin(sqrt((para[5] * para[4]) + (para[7] * para[4]))))
    denom = (1.0 + 0.001 * ((para[0] * para[10]) + (para[14] * para[12]))) * \
            (1.0 + 0.001 * ((para[10] * para[10]) + (para[15] * para[12])))
    # Use a local name that does not shadow the function itself.
    fitness = 0.5 - (num / denom)
    error = 1 - fitness
    return fitness, error
# Initialize the particles with random positions, zero velocity and
# zero fitness.
particles = []
for i in range(pop_size):
    p = Particle()
    p.params = array([random() for i in range(dimensions)])
    p.fitness = 0.0
    p.v = 0.0
    particles.append(p)

# let the first particle be the global best
gbest = particles[0]
err = 999999999

# NOTE(review): `i` is never reset after the initialisation loop above, so
# the optimisation starts at i == pop_size - 1 instead of 0 -- likely a bug.
# NOTE(review): `p.best` is only assigned when fitness improves; if the
# first evaluation is <= 0 the velocity update below would hit an
# AttributeError -- verify against the fitness function's range.
while i < iter_max :
    for p in particles:
        fitness,err = f6(p.params)
        if fitness > p.fitness:
            p.fitness = fitness
            p.best = p.params
        if fitness > gbest.fitness:
            gbest = p
        # Velocity update: inertia + cognitive pull toward the particle's
        # personal best + social pull toward the global best.
        v = p.v + c1 * random() * (p.best - p.params) \
            + c2 * random() * (gbest.params - p.params)
        p.params = p.params + v
    i += 1
    if err < err_crit:
        break
    #progress bar. '.' = 10%
    if i % (iter_max/10) == 0:
        print '.'

# Summary report (Python 2 print statements, kept as-is).
print '\nParticle Swarm Optimisation\n'
print 'PARAMETERS\n','-'*9
print 'Population size : ', pop_size
print 'Dimensions : ', dimensions
print 'Error Criterion : ', err_crit
print 'c1 : ', c1
print 'c2 : ', c2
print 'function : f6'

print 'RESULTS\n', '-'*7
print 'gbest fitness : ', gbest.fitness
print 'gbest params : ', gbest.params
print 'iterations : ', i+1

## Uncomment to print particles
#for p in particles:
#    print 'params: %s, fitness: %s, best: %s' % (p.params, p.fitness, p.best)
|
[
"hyou267@aucklanduni.ac.nz"
] |
hyou267@aucklanduni.ac.nz
|
|
e8fe52a1dfc9be4acfa62d7b5331c459b99b3c95
|
f22d4319e6f848202fe847f9190b78ceaae8ed12
|
/envExemplo/Lista15/cotacoes.py
|
d24d99c34eac2570cbd61758b10295c149e9ff19
|
[] |
no_license
|
AlexandreLouzada/Pyquest
|
7ecc0a3e3002df169bd53ae99e66c54019782698
|
29f0e67e5902fad0fc336ece63c9e5d3868d6b09
|
refs/heads/master
| 2023-08-30T22:54:46.438567
| 2023-08-21T19:04:39
| 2023-08-21T19:04:39
| 248,549,732
| 10
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 340
|
py
|
import requests
def obter_cotacao_moedas():
    """Fetch the current BRL exchange-rate table.

    Returns the ``rates`` dict on success, or ``None`` after printing an
    error message for a non-200 response.
    """
    url = "https://api.exchangerate-api.com/v4/latest/BRL"
    response = requests.get(url)
    if response.status_code == 200:
        # Parse the body only on success: the original decoded JSON before
        # checking the status, which raises on non-JSON error bodies.
        return response.json()["rates"]
    print(f"Erro ao obter as cotações de moedas: {response.status_code}")
    return None
|
[
"professorlouzada@gmail.com"
] |
professorlouzada@gmail.com
|
119eaee92a26b990c18be5c539d66fc169f18d94
|
6f9573c6ebc9d6431f610c540e281db11362cb43
|
/tree.py
|
f4c38bff6e4f261e805e802e0a30c11610383431
|
[] |
no_license
|
llkhacquan/url-remove-prediction
|
ac92fd3ffcd4ec6593f39d20bdfd96c7de4d0166
|
5d4a61e0305d41892b5481d765f7a1930c11f47b
|
refs/heads/master
| 2020-05-17T08:21:39.739323
| 2019-05-04T08:05:37
| 2019-05-04T08:05:37
| 183,603,202
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,927
|
py
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import BayesianRidge
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
from sklearn import tree, preprocessing
from sklearn import svm
from subprocess import call
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')

# First CLI argument: path to the whitespace-separated feature file.
if len(sys.argv) > 1:
    data_file = sys.argv[1]
else:
    logging.error(
        'usage: python3 tree.py <data-feature-file> [prediction_output_file]')
    exit(-1)
logging.info('Using feature file ' + data_file)
# Second CLI argument: prediction output path.
# NOTE(review): bound to `host` here, but only sys.argv[2] itself is used
# again below -- the name `host` looks like a leftover.
if len(sys.argv) > 2:
    host = sys.argv[2]
else:
    logging.error(
        'usage: python3 tree.py <data-feature-file> [prediction_output_file]')
    exit(-1)

data = pd.read_csv(data_file, sep=' ', header=None)
print("Dataset length", len(data))
print("Dataset shape", data.shape)
print("Data example:")
print(data.head())

# Column 0 is the label; the remaining columns are features. The first two
# feature columns are excluded at fit/predict time below -- presumably
# identifier columns, confirm against the feature-file format.
X = data.values[:, 1:]
Y = data.values[:, 0].astype('int')

# Cap the training set at roughly 100k rows (and at most 50% of the data).
test_size = 1 - min(100000/len(data), 0.5)
print("Test size =", test_size, "(", len(data)*test_size, ")")
print("Training size =", 1-test_size, "(", len(data)*(1-test_size), ")")
X_train, X_test, y_train, y_test = train_test_split(
    X, Y, test_size=test_size)

clf = DecisionTreeClassifier()
logging.info("start training")
clf.fit(X_train[:, 2:], y_train)
logging.info("done training")

score = clf.score(X_test[:, 2:], y_test)
print("score", score)
y_pred = clf.predict(X_test[:, 2:])
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))

# Optionally dump (prediction, truth, first feature column) rows to the
# output file given as the second CLI argument.
if len(sys.argv) > 2:
    logging.info("Extract predicted result to %s", (sys.argv[2]))
    dummy_data1 = [y_pred, y_test, X_test[:, 0]]
    df1 = pd.DataFrame(dummy_data1).transpose()
    df1.to_csv(sys.argv[2], sep=' ', header=None, index=False)
    logging.info("Done")
|
[
"quannk@coccoc.com"
] |
quannk@coccoc.com
|
a100b36ab4229b73b6da9f86bed3a4cf38b9a2ef
|
42cb644e9e3aab54d0ed32c33e7772c0f9716c07
|
/task3/prog/publ.py
|
b0ea392105d2b00bf316e0a6dcf424620dbc6fbb
|
[] |
no_license
|
tyomaz/practica2020
|
864cbbc58aab90a430ec84decd89298ccebff570
|
15a2b35e0926d2a40221357feed37af27150fcc2
|
refs/heads/master
| 2022-11-25T20:05:58.330691
| 2020-07-24T07:31:40
| 2020-07-24T07:31:40
| 276,693,157
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,522
|
py
|
import time
import random
from multiprocessing import Manager, Process
from influxdb import InfluxDBClient
class Var:
    """A named value plus the step size used when it is randomly perturbed."""

    def __init__(self, name, begin_value, change_range):
        # Coerce both numeric fields to float up front so later arithmetic
        # stays float-only.
        initial = float(begin_value)
        step = float(change_range)
        self.name = name
        self.value: float = initial
        self.change_rate: float = step
        print(self.value, self.change_rate)
class PublisherClass:
    """Publishes a set of randomly drifting named values to InfluxDB.

    A background process (work_f) writes the current values once per
    second; the variable list is shared through a multiprocessing Manager
    and guarded by a manager lock.
    """

    def __init__(self):
        # NOTE(review): host/database are hard-coded; 'influx' is presumably
        # a docker-compose service name -- confirm.
        self._client = InfluxDBClient(host='influx', port=8086, database="name")
        self.manager = Manager()
        # NOTE(review): this lock comes from a *second* Manager() instance,
        # separate from self.manager -- looks unintentional, verify.
        self._lc = Manager().Lock()
        self._var_list = self.manager.list()
        self.wrk = None  # handle of the background publisher process

    def add_field(self, variable: Var):
        """Register *variable* for publishing (lock-protected)."""
        self._lc.acquire(blocking=True)
        self._var_list.append(variable)
        self._lc.release()

    def start_work(self):
        """Spawn the background publisher; returns False if already running."""
        if self.wrk is not None:
            return False
        self.wrk = Process(target=PublisherClass.work_f, args=[self])
        self.wrk.start()
        return True

    def del_field(self, nm):
        """Remove the variable named *nm*; returns False when not found."""
        index = -1
        self._lc.acquire(blocking=True)
        for a in range(len(self._var_list)):
            if self._var_list[a].name == nm:
                index = a
                break
        rv = True
        if index != -1:
            self._var_list.pop(index)
        else:
            rv = False
        self._lc.release()
        return rv

    def stop_work(self):
        """Terminate and join the background process; False if none runs."""
        if self.wrk is None:
            return False
        self._lc.acquire(blocking=True)
        self.wrk.terminate()
        self.wrk.join()
        self.wrk = None
        self._lc.release()
        return True

    def work_f(self):
        """Process body: publish all values once per second, then perturb
        each value by its change_rate around +/-1."""
        while True:
            if len(self._var_list) != 0:
                self._lc.acquire(blocking=True)
                tm = {}
                for a in self._var_list:
                    tm[a.name] = a.value
                self._client.write_points([
                    {
                        "measurement": "mes",
                        "fields": tm
                    }
                ])
                # Reassign each element so the Manager proxy propagates the
                # in-place mutation back to the shared list.
                for a in range(len(self._var_list)):
                    rr = self._var_list[a]
                    rr.value = -1 + 2 * random.randint(0, 1) + rr.change_rate * random.random()
                    self._var_list[a] = rr
                self._lc.release()
            time.sleep(1)

    def get_var_list(self):
        """Return a plain dict snapshot {name: value} of current variables."""
        self._lc.acquire(blocking=True)
        r_m = {}
        for a in self._var_list:
            r_m[a.name] = a.value
        self._lc.release()
        return r_m
|
[
"noreply@github.com"
] |
tyomaz.noreply@github.com
|
4609e99a44628726eb2f9570506891c0b578045c
|
495db4d37d0bf4f19d12cbae49768c517c8ab701
|
/09_numerosRomanos.py
|
1722936b21a0aa6182b57c71192df2651da0d872
|
[] |
no_license
|
anbreaker/retosKeepcoding
|
34b786489071c064e1b0ec2182d40e7c92b84575
|
96eed8c53daccb88ac53671dfd6f107f0f63fd55
|
refs/heads/master
| 2020-08-24T18:04:37.385885
| 2019-12-12T23:02:06
| 2019-12-12T23:02:06
| 216,878,463
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,820
|
py
|
# Conversor de numeros romanos en python
# I 1 | V 5 | X 10
# L 50 | C 100 | D 500 | M 1000
# valoresNumerosRomanos = {1: 'I', 5: 'V', 10: 'X', 50: 'L', 100: 'C', 500: 'D', 1000: 'M'}
# Symbol -> value map ordered from largest to smallest value; the
# converters below rely on this iteration order (dict insertion order,
# guaranteed on Python 3.7+).
numRomanos = {'M': 1000, 'CM': 900, 'D': 500, 'C': 100,
              'XC': 90, 'L': 50, 'X': 10, 'IX': 9, 'V': 5, 'I': 1}
def descomponiendoNumArabigo(numero):  # Convert to Roman numerals
    """Convert a non-negative integer to its Roman-numeral string.

    Correct for 0..3999 (0 yields the empty string, as before). Uses a
    complete value table including the subtractive forms CD/XL/IV, which
    the previous version emulated by special-casing 'CCCC'/'XXXX'/'IIII'.
    """
    tabla = (
        (1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'),
        (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'),
        (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I'),
    )
    resultado = ''
    for valor, simbolo in tabla:
        # divmod gives the repeat count and the remainder in one step.
        cociente, numero = divmod(numero, valor)
        resultado += simbolo * cociente
    return resultado
'''
def validarNumero(numero):
if numero > 0:
# print(f'en la def {numero}')
return True
else:
return False
# Pedir opciones
numero = 0
while not validarNumero(numero):
try:
if not validarNumero(numero):
numero = int(input('Introduzca un numero entero: '))
except ValueError:
print('Debes introducir sólo numéros enteros positivos. ')
'''
def descomponiendoNumRomano(numRoman):
    """Convert a Roman-numeral string to its integer value.

    Handles subtractive notation (IV, IX, XL, ...), which the previous
    additive-only sum got wrong (e.g. 'XIV' -> 16 instead of 14).
    Prints and returns the result, matching the original interface.
    """
    # Single-symbol values (multi-letter keys cannot occur per character).
    valores = {'M': 1000, 'D': 500, 'C': 100, 'L': 50, 'X': 10, 'V': 5, 'I': 1}
    resultado = 0
    anterior = 0
    # Scan right-to-left: a symbol smaller than the one processed before it
    # (the 'I' in 'IV') is subtracted, otherwise added.
    for letra in reversed(numRoman):
        valor = valores[letra]
        if valor < anterior:
            resultado -= valor
        else:
            resultado += valor
            anterior = valor
    print(resultado)
    return resultado
# Demo values; only the Roman -> Arabic direction is exercised below.
numero = 199
numRoman = 'XIV'

# print(f'El numero {numero} en romano es: {descomponiendoNumArabigo(numero)}')
print(f'El numero {numRoman} romano es--> {descomponiendoNumRomano(numRoman)}')
|
[
"antunez19@gmail.com"
] |
antunez19@gmail.com
|
d429f1a177c70f82dd239485cf71663e94656612
|
aef5738b63824bfb3e04bbfbe490c4cdbf2737c7
|
/samples/Ex_Files_Python_Gen/Ch3/03_02/coroutine_example.py
|
62536c0a3a74284ea008918ae9dd0366201c56f3
|
[
"MIT"
] |
permissive
|
akashtalole/python-samples
|
15a25287153d0f19b2f0f3dd14bdac606f585c1d
|
9dc9ed9410f210b09154bd53efe1b741a15907a4
|
refs/heads/master
| 2023-05-22T22:13:45.934692
| 2022-05-07T05:29:02
| 2022-05-07T05:29:02
| 206,237,129
| 4
| 0
|
MIT
| 2023-05-01T21:15:20
| 2019-09-04T05:17:11
|
Python
|
UTF-8
|
Python
| false
| false
| 105
|
py
|
def coroutine_example():
    """Consumer coroutine that prints every value sent into it.

    Prime it with ``next()`` before the first ``send()``.
    """
    while True:
        x = yield
        # Do something with x; here we just echo it. (``print x`` was
        # Python 2 syntax -- a SyntaxError on Python 3; the call form
        # behaves the same on both.)
        print(x)
|
[
"taloleakash@gmail.com"
] |
taloleakash@gmail.com
|
ccf3fe025541d9a45220bd61197b51888b9e6af1
|
09e5cfe06e437989a2ccf2aeecb9c73eb998a36c
|
/base/lib/python2.7/site-packages/wx-3.0-gtk2/wx/tools/XRCed/plugins/xh_gizmos.py
|
4e7490542a4986d421022ee395919f575e443028
|
[
"LicenseRef-scancode-python-cwi",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-free-unknown",
"Python-2.0",
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
jorgediazjr/dials-dev20191018
|
b81b19653624cee39207b7cefb8dfcb2e99b79eb
|
77d66c719b5746f37af51ad593e2941ed6fbba17
|
refs/heads/master
| 2020-08-21T02:48:54.719532
| 2020-01-25T01:41:37
| 2020-01-25T01:41:37
| 216,089,955
| 0
| 1
|
BSD-3-Clause
| 2020-01-25T01:41:39
| 2019-10-18T19:03:17
|
Python
|
UTF-8
|
Python
| false
| false
| 5,971
|
py
|
# Name: gizmos.py
# Purpose: XML handlers for wx.gismos classes
# Author: Roman Rolinsky <rolinsky@femagsoft.com>
# Created: 09.07.2007
# RCS-ID: $Id$
import wx
import wx.xrc as xrc
import wx.gizmos as gizmos
class LEDNumberCtrlXmlHandler(xrc.XmlResourceHandler):
    """XRC resource handler that builds a gizmos.LEDNumberCtrl from XML."""

    def __init__(self):
        xrc.XmlResourceHandler.__init__(self)
        # Standard styles
        self.AddWindowStyles()
        # Custom styles
        self.AddStyle('wxLED_ALIGN_LEFT', gizmos.LED_ALIGN_LEFT)
        self.AddStyle('wxLED_ALIGN_RIGHT', gizmos.LED_ALIGN_RIGHT)
        self.AddStyle('wxLED_ALIGN_CENTER', gizmos.LED_ALIGN_CENTER)
        self.AddStyle('wxLED_DRAW_FADED', gizmos.LED_DRAW_FADED)

    def CanHandle(self,node):
        # Handle only <object class="LEDNumberCtrl"> nodes.
        return self.IsOfClass(node, 'LEDNumberCtrl')

    # Process XML parameters and create the object
    def DoCreateResource(self):
        assert self.GetInstance() is None
        w = gizmos.LEDNumberCtrl(self.GetParentAsWindow(),
                                 self.GetID(),
                                 self.GetPosition(),
                                 self.GetSize(),
                                 self.GetStyle())
        # wxLED_ALIGN_MASK was incorrect
        # (hence the hand-written mask of the low three style bits below)
        align = self.GetStyle() & 7
        if align: w.SetAlignment(self.GetStyle() & 7)
        w.SetValue(self.GetText('value'))
        self.SetupWindow(w)
        return w
class EditableListBoxXmlHandler(xrc.XmlResourceHandler):
    """XRC resource handler that builds a gizmos.EditableListBox from XML."""

    def __init__(self):
        xrc.XmlResourceHandler.__init__(self)
        # Standard styles
        self.AddWindowStyles()
        # Custom styles
        self.AddStyle('wxEL_ALLOW_NEW', gizmos.EL_ALLOW_NEW)
        self.AddStyle('wxEL_ALLOW_EDIT', gizmos.EL_ALLOW_EDIT)
        self.AddStyle('wxEL_ALLOW_DELETE', gizmos.EL_ALLOW_DELETE)

    def CanHandle(self, node):
        return self.IsOfClass(node, 'EditableListBox')
        # return self.IsOfClass(node, 'EditableListBox') or \
        #        self.insideBox and node.GetName() == 'item'

    # Process XML parameters and create the object
    def DoCreateResource(self):
        assert self.GetInstance() is None
        w = gizmos.EditableListBox(self.GetParentAsWindow(),
                                   self.GetID(),
                                   self.GetText("label"),
                                   self.GetPosition(),
                                   self.GetSize(),
                                   self.GetStyle(),
                                   self.GetName())

        # Doesn't work
        #self.insideBox = True
        #self.CreateChildrenPrivately(None, self.GetParamNode('content'))
        #self.insideBox = False

        # Long way: walk the <content> node's <item> children manually and
        # collect their text content as the listbox strings.
        strings = []
        n = self.GetParamNode('content')
        if n: n = n.GetChildren()
        while n:
            if n.GetType() != xrc.XML_ELEMENT_NODE or n.GetName() != "item":
                n = n.GetNext()
                continue
            strings.append(n.GetNodeContent())
            n = n.GetNext()
        w.SetStrings(strings)
        self.SetupWindow(w)
        return w
class TreeListCtrlXmlHandler(xrc.XmlResourceHandler):
    """XRC resource handler that builds a gizmos.TreeListCtrl from XML.

    NOTE(review): DoCreateResource populates the control with fixed demo
    columns and items rather than reading them from the XML -- presumably
    placeholder content for the XRCed preview; confirm before reuse.
    """

    def __init__(self):
        xrc.XmlResourceHandler.__init__(self)
        # Standard styles
        self.AddWindowStyles()
        # Custom styles
        self.AddStyle('wxTR_DEFAULT_STYLE', wx.TR_DEFAULT_STYLE)
        self.AddStyle('wxTR_EDIT_LABELS', wx.TR_EDIT_LABELS)
        self.AddStyle('wxTR_NO_BUTTONS', wx.TR_NO_BUTTONS)
        self.AddStyle('wxTR_HAS_BUTTONS', wx.TR_HAS_BUTTONS)
        self.AddStyle('wxTR_TWIST_BUTTONS', wx.TR_TWIST_BUTTONS)
        self.AddStyle('wxTR_NO_LINES', wx.TR_NO_LINES)
        self.AddStyle('wxTR_FULL_ROW_HIGHLIGHT', wx.TR_FULL_ROW_HIGHLIGHT)
        self.AddStyle('wxTR_LINES_AT_ROOT', wx.TR_LINES_AT_ROOT)
        self.AddStyle('wxTR_HIDE_ROOT', wx.TR_HIDE_ROOT)
        self.AddStyle('wxTR_ROW_LINES', wx.TR_ROW_LINES)
        self.AddStyle('wxTR_HAS_VARIABLE_ROW_HEIGHT', wx.TR_HAS_VARIABLE_ROW_HEIGHT)
        self.AddStyle('wxTR_SINGLE', wx.TR_SINGLE)
        self.AddStyle('wxTR_MULTIPLE', wx.TR_MULTIPLE)
        self.AddStyle('wxTR_EXTENDED', wx.TR_EXTENDED)

    def CanHandle(self, node):
        return self.IsOfClass(node, 'TreeListCtrl')

    # Process XML parameters and create the object
    def DoCreateResource(self):
        assert self.GetInstance() is None
        w = gizmos.TreeListCtrl(self.GetParentAsWindow(),
                                self.GetID(),
                                style=self.GetStyle(),
                                name=self.GetName())
        w.AddColumn("Main column")
        w.AddColumn('Column 1')
        w.SetMainColumn(0)
        w.SetColumnWidth(0, 50)
        w.SetColumnWidth(1, 50)
        root = w.AddRoot('Root')
        w.SetItemText(root, "col 1", 1)
        item1 = w.AppendItem(root, 'item 1')
        w.SetItemText(item1, "col 1", 1)
        w.Expand(root)
        return w
class DynamicSashWindowXmlHandler(xrc.XmlResourceHandler):
    """XRC resource handler that builds a gizmos.DynamicSashWindow from XML."""

    def __init__(self):
        xrc.XmlResourceHandler.__init__(self)
        # Standard styles
        self.AddWindowStyles()
        # Custom styles
        self.AddStyle('wxDS_MANAGE_SCROLLBARS', gizmos.DS_MANAGE_SCROLLBARS)
        self.AddStyle('wxDS_DRAG_CORNER', gizmos.DS_DRAG_CORNER)

    def CanHandle(self, node):
        return self.IsOfClass(node, 'DynamicSashWindow')

    # Process XML parameters and create the object
    def DoCreateResource(self):
        assert self.GetInstance() is None
        w = gizmos.DynamicSashWindow(self.GetParentAsWindow(),
                                     self.GetID(),
                                     self.GetPosition(),
                                     self.GetSize(),
                                     self.GetStyle(),
                                     self.GetName())
        self.SetupWindow(w)
        return w
|
[
"jorge7soccer@gmail.com"
] |
jorge7soccer@gmail.com
|
fa3dd38cf2d1bec4b0d05019fd7694eb7cf0d6f1
|
7786df1ff60e537760e6e18e5955b1330c57c282
|
/pytorch.py
|
a377420c2804342705f424d81f57d692c900c427
|
[] |
no_license
|
dheidenr/ipavlov_course
|
3186bb064c9f65d3369537288fcb2a2f0062995f
|
2e3765150805d49e1cbeba2fe367616d9437789f
|
refs/heads/master
| 2021-02-11T01:44:15.679322
| 2020-06-04T13:52:35
| 2020-06-04T13:52:35
| 244,439,172
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 85
|
py
|
import torch  # imported but unused in this scratch script

# Scratch experiments with assignment/rebinding semantics.
print('hello')
a = 10
b = a
b = 2  # rebinding b does not change a
print(b)
a = (1, 2, 3)
b = list(a)  # list(...) copies the tuple's items into a new list
|
[
"work-post@bk.ru"
] |
work-post@bk.ru
|
a7589f94310be332d65680e35115af8b6f847f08
|
db869da1bcbc5584a3af8c109656b3ef9710ad83
|
/tools/colored-hangul-image-generator.py
|
137d432b1f0c29d25b94fb168d7f0cc4dcf457c6
|
[] |
no_license
|
lionem2018/Skeletonization-for-color-character
|
b70dabf669bfecc2dd5d7d53784c0554e8c2e080
|
17cd311db3ca8168eca2c275e5fec02469017aed
|
refs/heads/master
| 2020-05-27T01:32:37.325491
| 2019-06-18T08:37:39
| 2019-06-18T08:37:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,786
|
py
|
#!/usr/bin/env python
import argparse
import glob
import io
import os
import random
import numpy
from PIL import Image, ImageFont, ImageDraw
from scipy.ndimage.interpolation import map_coordinates
from scipy.ndimage.filters import gaussian_filter
# Absolute directory of this script; the data paths below are relative to it.
SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))

# Default data paths.
DEFAULT_LABEL_FILE = os.path.join(SCRIPT_PATH,
                                  '../labels/2350-common-hangul.txt')
DEFAULT_FONTS_DIR = os.path.join(SCRIPT_PATH, '../fonts')
DEFAULT_OUTPUT_DIR = os.path.join(SCRIPT_PATH, '../image-data')

# Number of random distortion images to generate per font and character.
# NOTE(review): only referenced from commented-out code below -- currently
# unused at runtime.
DISTORTION_COUNT = 3

# Width and height of the resulting image.
IMAGE_WIDTH = 64
IMAGE_HEIGHT = 64
def generate_hangul_images(label_file, fonts_dir, output_dir):
    """Generate Hangul image files.
    This will take in the passed in labels file and will generate several
    images using the font files provided in the font directory. The font
    directory is expected to be populated with *.ttf (True Type Font) files.
    The generated images will be stored in the given output directory. Image
    paths will have their corresponding labels listed in a CSV file.
    """
    with io.open(label_file, 'r', encoding='utf-8') as f:
        labels = f.read().splitlines()

    image_dir = os.path.join(output_dir, 'hangul-images-color')
    if not os.path.exists(image_dir):
        os.makedirs(os.path.join(image_dir))

    # Get a list of the fonts.
    fonts = glob.glob(os.path.join(fonts_dir, '*.ttf'))

    labels_csv = io.open(os.path.join(output_dir, 'labels-map.csv'), 'w',
                        encoding='utf-8')

    total_count = 0
    prev_count = 0
    for character in labels:
        # Print image count roughly every 5000 images.
        if total_count - prev_count > 5000:
            prev_count = total_count
            print('{} images generated...'.format(total_count))

        for font in fonts:
            total_count += 1
            image = Image.new('RGB', (IMAGE_WIDTH, IMAGE_HEIGHT), color=0)  # change image color format from binary('L') to RGB
            font = ImageFont.truetype(font, 48)
            drawing = ImageDraw.Draw(image)
            w, h = drawing.textsize(character, font=font)
            drawing.text(
                ((IMAGE_WIDTH-w)/2, (IMAGE_HEIGHT-h)/2),
                character,
                fill='#0000FF',  # draw character with blue('#0000FF)
                font=font
            )

            # Recolor parts of the character: glyph pixels are (0, 0, 255),
            # so the lexicographic tuple comparison > (0, 0, 128) selects
            # pixels whose blue channel exceeds 128 (with R = G = 0).
            pixels = image.load()
            # Change the top-left region to red.
            for i in range(0, 31):
                for j in range(0, 31):
                    if pixels[i, j] > (0, 0, 128):
                        pixels[i, j] = (255, 0, 0)
            # Change the bottom-right region to green.
            for i in range(32, image.size[0]):
                for j in range(32, image.size[1]):
                    if pixels[i, j] > (0, 0, 128):
                        pixels[i, j] = (0, 255, 0)

            file_string = '{}.png'.format(total_count)
            file_path = os.path.join(image_dir, file_string)
            image.save(file_path, 'PNG')
            labels_csv.write(u'{},{}\n'.format(file_path, character))

            # Elastic-distortion augmentation, disabled in this variant.
            # for i in range(DISTORTION_COUNT):
            #     total_count += 1
            #     file_string = 'hangul_{}.png'.format(total_count)
            #     file_path = os.path.join(image_dir, file_string)
            #     arr = numpy.array(image)
            #     distorted_array = elastic_distort(
            #         arr, alpha=random.randint(30, 36),
            #         sigma=random.randint(5, 6)
            #     )
            #     distorted_image = Image.fromarray(distorted_array)
            #     distorted_image.save(file_path, 'PNG')
            #     labels_csv.write(u'{},{}\n'.format(file_path, character))

    print('Finished generating {} images.'.format(total_count))
    labels_csv.close()
def elastic_distort(image, alpha, sigma):
    """Apply a random elastic deformation to a 2-D image array.

    *alpha* scales the intensity of the displacement field; *sigma* is the
    standard deviation of the Gaussian used to smooth the random field.
    """
    rng = numpy.random.RandomState(None)
    dims = image.shape
    # Two independent smoothed noise fields give the x/y displacements.
    dx = alpha * gaussian_filter(rng.rand(*dims) * 2 - 1, sigma, mode="constant")
    dy = alpha * gaussian_filter(rng.rand(*dims) * 2 - 1, sigma, mode="constant")
    x, y = numpy.meshgrid(numpy.arange(dims[0]), numpy.arange(dims[1]))
    sample_rows = numpy.reshape(y + dy, (-1, 1))
    sample_cols = numpy.reshape(x + dx, (-1, 1))
    # Bilinear (order=1) resampling at the displaced coordinates.
    warped = map_coordinates(image, (sample_rows, sample_cols), order=1)
    return warped.reshape(dims)
if __name__ == '__main__':
    # CLI: every argument is optional and falls back to the repo-relative
    # default paths defined above.
    parser = argparse.ArgumentParser()
    parser.add_argument('--label-file', type=str, dest='label_file',
                        default=DEFAULT_LABEL_FILE,
                        help='File containing newline delimited labels.')
    parser.add_argument('--font-dir', type=str, dest='fonts_dir',
                        default=DEFAULT_FONTS_DIR,
                        help='Directory of ttf fonts to use.')
    parser.add_argument('--output-dir', type=str, dest='output_dir',
                        default=DEFAULT_OUTPUT_DIR,
                        help='Output directory to store generated images and '
                             'label CSV file.')
    args = parser.parse_args()
    generate_hangul_images(args.label_file, args.fonts_dir, args.output_dir)
|
[
"lionem2018@gmail.com"
] |
lionem2018@gmail.com
|
8eb8b78a3c73b4e6636cc5e1e1f54ed35cbe219d
|
2a9dff1294b9ee2eec1195d41b7d861206ea6f5d
|
/clean/old/main_proto.py
|
17178770bfed6e29735c7ab231132251a4fed2e2
|
[] |
no_license
|
HanEmile/Heidelberg
|
a4dce19d5542a77ec417ee30ca8e715a6d227bf2
|
a64a41ff5c15b433d6b89d6014f33ec7ef91c146
|
refs/heads/master
| 2020-12-02T21:20:22.783224
| 2017-08-14T17:06:25
| 2017-08-14T17:06:25
| 96,296,221
| 0
| 2
| null | 2017-07-12T15:02:48
| 2017-07-05T08:30:22
|
Python
|
UTF-8
|
Python
| false
| false
| 2,788
|
py
|
import math
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from scipy.interpolate import InterpolatedUnivariateSpline
import os
from multiprocessing import Pool
# custom imports
import heidelberg as hd
import variables
import constants
#####
# README!
# Generate evenly distributed coordinates and insert them into the rho
# function to get a rho-value. Compare that value to a random value to see
# if a star should be generated.
#####

# Control parameters.
samples = int(1e3)
stars = int(1e3)

# lists / arrays
lista = []
listb = []
arr_rand_pos = np.zeros((stars, 3))

# Create a new file for every calculation (first unused index).
# NOTE(review): `== True` is redundant; kept byte-identical here.
file_nr = 1
while os.path.isfile('3D/3D_data/' + str(file_nr) + '.csv') == True:
    file_nr += 1
path = '3D/3D_data/' + str(file_nr) + '.csv'
print(path)
def gen_star(star_nr):
    """Sample one random 3-D position and decide, by rejection sampling
    against the radial density hd.rho, whether to persist a star there.

    Accepted coordinates are appended to the CSV file at the module-level
    *path*; the candidate is also stored in arr_rand_pos[star_nr].
    """
    # Temporary array holding this star's candidate coordinates
    # (-> multiprocessing: kept local so workers do not share state).
    temp_arr = np.zeros((3))

    # Generate a random coordinate for every axis and store it.
    for axis in range(0, 3):
        rand_val = np.random.uniform(int(0), int(1e15), size=1)
        arr_rand_pos[star_nr][axis] = rand_val

    # Define x, y and z for better access.
    x = arr_rand_pos[star_nr][0]
    y = arr_rand_pos[star_nr][1]
    z = arr_rand_pos[star_nr][2]

    # Write the random values to the temporary array.
    temp_arr[0] = x
    temp_arr[1] = y
    temp_arr[2] = z

    # Radial distance and its density, needed by the print and the
    # rejection test below. (The original referenced r/rho_r without ever
    # defining them -> NameError; reconstructed from the commented-out
    # lines that computed r = sqrt(x^2 + y^2 + z^2) and hd.rho(r).)
    r = math.sqrt(x**2 + y**2 + z**2)
    rho_r = hd.rho(r)

    # Random "check" number to determine if a star should be drawn.
    rand_val_check = np.random.uniform(0, 1500, size=1)

    # Print some information.
    print("{:<20}{:<20}{:<20}".format(x, y, z), end="")
    print("{:<20}{:<20}".format(str(hd.rho3d(x, y, z)), str(rand_val_check)), end="")
    print("{:<20}{:<20}".format(r, rho_r))

    # Accept the star when the density beats the random threshold.
    if rand_val_check < rho_r:
        # Write the coordinates of the star to the output file.
        with open(path, "a") as data:
            data.write(str(temp_arr[0]).strip("[]") + ",")
            data.write(str(temp_arr[1]).strip("[]") + ",")
            data.write(str(temp_arr[2]).strip("[]"))
            data.write("\n")
# Generate the stars one by one.
# NOTE(review): runs before the header row below is printed, so the column
# legend appears after the data -- looks unintentional.
for star in range(0, stars):
    gen_star(star)

# Print the column info so you know what is what.
print("{:<20}{:<20}{:<20}".format("x", "y", "z"), end="")
print("{:<20}{:<20}{:<20}{:<20}".format("hd.rho3d(x, y, z)", "rand_val_check", "r", "hd.rho(r)"))
print("")

# # DISPLAY ARRAY R
# arr_rho = np.logspace(0, 10, int(1e5))
#
# for r in arr_r:
#     print(r, end="")
#     print(hd.rho(r))
#     listb.append(hd.rho(r))
#
# # plot the lists
# plt.plot(lista)
# plt.plot(listb)
#
# # configure the plot
# plt.xscale('log')
# plt.legend(["lista", "listb"])
#
# # display the plot
# plt.show()
|
[
"emile.hansmaennel@gmail.com"
] |
emile.hansmaennel@gmail.com
|
30562d714d8e90497a2db796240e4ac888916f25
|
717442f6b0b497ea18c37de75f5ce5dac4768aef
|
/scrapers/scrapers/spiders/lydias_spider.py
|
e966c68fb38f1e1161a44f8eaa4068c8a62a909c
|
[] |
no_license
|
marjevtic/testMarko
|
6c327ec7286ee0ece21c0d237bae861dac91d45f
|
336f1b35c99c4521ea833337a558ab0d2aa1222d
|
refs/heads/master
| 2021-01-22T04:48:42.197965
| 2012-09-10T07:39:28
| 2012-09-10T07:39:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21,173
|
py
|
from scrapy.contrib.spiders import CrawlSpider
from scrapy.selector import HtmlXPathSelector
import modules.basic_func as basic
from modules.zmags_xml import VariantsXml
from modules.excel import DictExcel
from modules.exception import ZmagsException
from modules.terminal import DatabaseTerminal
from project_modules.lydias import LydiasItem
from modules.export_to_db import CommonExport
from modules.database import Database
import project_modules.lydias.lydias as lydias
import hashlib
import urllib2
import simplejson
import sys
import re
import os
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from scrapy.conf import settings
class LydiasSpider(CrawlSpider):
name = "lydias"
allowed_domains = ["example.com"]
start_urls = ["http://www.example.com"]
counter = 0
def __init__(self, *a, **kw):
super(LydiasSpider, self).__init__(*a, **kw)
dispatcher.connect(self.spider_closed, signals.spider_closed)
terminal = DatabaseTerminal(sys.argv, self.name)
self.d = terminal.get_arguments()
self.xml = VariantsXml()
self.exc = ZmagsException(5)
if self.d['database']:
self.database = Database()
self.database.connect()
self.products, self.no_urls = self.database.select_products(self.d['catalog_id'],
self.d['product_id'])
self.database.disconnect()
else:
self.get_lists_from_excel()
# fix for bug with links they provide
self.products['urls'] = basic.cut_string_field(self.products['urls'], "&cat=")
self.handle_not_provided()
self.start_urls = self.products['urls']
self.images_store = "/" + settings['IMAGES_STORE']
lydias.add_properties(self.xml)
self.total = len(self.products['urls'])
def parse(self, response):
self.counter += 1
basic.print_status(self.counter, self.total)
hxs = HtmlXPathSelector(response)
item = LydiasItem()
if 'redirect_urls' in response.request.meta:
cur_url = response.request.meta['redirect_urls'][0]
else:
cur_url = response.url
index = self.products['urls'].index(cur_url)
id = self.products['product_ids'][index]
try:
available = hxs.select('//div[@id="searchfor"]/text()').extract()
if not available:
item['product_id'] = [id]
item['name'], item['price'], item['old_price'], item['description'] = self.get_basic_info(hxs)
item['rating'], item['custom_rating'] = self.get_rating(hxs)
chart = self.absolute_path(self.get_size_image(hxs))
item['sizes_chart_image_url'] = self.get_server_path(chart)
color_urls, color_names, product_image, color_codes = self.get_image_swatches(hxs)
color_urls = self.absolute_path(color_urls)
item['color_image_url'] = self.make_colors_json(color_urls, color_names, color_codes)
item['in_stock'] = ["IN_STOCK"]
item['embroidery'] = self.get_embroidery(hxs)
default_images = self.absolute_path(self.get_extra_images(hxs))
item['default_image_url'] = self.get_server_path(default_images)
self.xml.create_xml(item)
product_image = self.absolute_path(product_image)
self.create_subproducts(id, color_names, product_image, color_codes, hxs)
item['image_urls'] = product_image + color_urls + chart + default_images
self.products['status'][index] = "ran"
else:
self.exc.code_handler(102, response.url)
item['product_id'] = [id]
item['in_stock'] = ["NOT_AVAILABLE"]
self.products['status'][index] = "not_avail"
self.xml.create_xml(item)
except:
self.products['status'][index] = "error"
self.exc.code_handler(100, response.url)
return item
# function for checking if product has embroidery or not
def get_embroidery(self, hxs):
page = hxs.select('//html').extract()[0]
if "document.getElementById('logocolor').disabled = true;" in page:
return ["True"]
else:
return ["False"]
# function for creating json with all information for colors
def make_colors_json(self, color_urls, color_names, color_codes):
dict = {}
jsons = []
for i in range(0, len(color_urls)):
dict['color_url'] = self.get_server_path_single(color_urls[i])
dict['color_name'] = color_names[i]
dict['color_short'] = color_codes[i]
json = basic.cdata(simplejson.dumps(dict))
jsons.append(json)
return jsons
# function for getting image server path
def get_server_path_single(self, url):
# return url
return self.images_store + "/full/" + hashlib.sha1(url).hexdigest() + ".jpg"
# function for getting image path for field of images
def get_server_path(self, urls):
# return urls
new = []
for url in urls:
new.append(self.images_store + "/full/" + hashlib.sha1(url).hexdigest() + ".jpg")
return new
#function for getting basic information for product
def get_basic_info(self, hxs):
name = hxs.select('//div[@id="proddetail"]/h1/text()').extract()
price = hxs.select('//div[@id="proddetail"]/div[@class="yourprice bigprice"]/text()').extract()
description = basic.cdata(hxs.select('//div[@id="details"]').extract()[0])
description = basic.clean_string(description)
old_price = hxs.select('//span[@class="yourprice_product"]/text()').extract()
if not price:
price = hxs.select('//span[@id="PriceDisplay"]/text()').extract()
if old_price:
old_price = [re.sub('[^0-9.]', '', old_price[0])]
price = [re.sub('[^0-9.]', '', price[0])]
return name, price, old_price, [description]
# function for getting rating, both number and sentence (e.g. Rating 5 out of 6 votes)
def get_rating(self, hxs):
temp = hxs.select('//div[@id="Customerssay"]/p[2]/text()').extract()
if temp:
rating = basic.get_middle_text(temp[0].replace(" ", ""), "Rating:", "out")
return rating, temp
else:
return [], temp
#function for getting reviews, returning rating and field of json reviews
# or empty fields if there's no reviews
def get_reviews(self, hxs):
reviews = hxs.select('//div[@class="prodReview"]')
if reviews:
title = reviews[0].select('p[@class="review_title"]/text()').extract()
text = reviews[0].select('p[@class="review_text"]/text()').extract()
author = reviews[0].select('p[@class="review_author"]/text()').extract()
location = reviews[0].select('p[@class="review_location"]/text()').extract()
jsons = self.make_reviews_json(title, text, author, location)
return jsons
else:
return []
# function for making json for reviews
# currently not in use. cause there are no reviews in DPW design
def make_reviews_json(self, title, text, author, location):
jsons = []
print len(title)
print len(text)
print len(author)
print len(location)
os._exit(0)
for i in range(0, len(title)):
json = '{ "title" : " %s ", "text" : "%s", "author" : "%s", "location" :\
"%s" }' % (title[i], text[i], author[i], location[i])
json = basic.cdata(json)
jsons.append(json)
return jsons
#function for getting size chart image
def get_size_image(self, hxs):
temp = hxs.select('//div[@class="TabbedPanelsContent cells"]/img/@src').extract()
return temp
#function for getting image swatches, returning fields (image_urls, image name, product color image)
def get_image_swatches(self, hxs):
colors = hxs.select('//div[@class="lolite"]')
color_images = []
color_names = []
products_image = []
color_codes = []
for color in colors:
color_images.append(color.select('a/img/@src').extract()[0])
color_names.append(color.select('a/img/@alt').extract()[0])
#if zoom image needed, this is the place to get it
products_image.append(color.select('a/@rev').extract()[0])
color_codes.append(color.select('a/@onclick').extract()[0].split(",")[1].replace("'", ""))
return color_images, color_names, products_image, color_codes
#function for getting additional images, returns field of images or empty field if there is no
def get_extra_images(self, hxs):
additional_images = hxs.select('//div[@id="AddImg"]/script/text()').extract()
if additional_images:
temp = basic.get_middle_text(additional_images[0], '"', '"')
thumb_images = temp[0].split(",")
return thumb_images
else:
return []
#function for getting product id from the page
def get_product_id(self, hxs):
temp = hxs.select('//div[@id="wrap"]/script/text()').extract()
id = basic.get_middle_text(temp[0], 'productid","', '"')
return id[0]
# function for getting sizes from another url, retunrning field of jsons for sizes
# one id from the page is 115NB, if needed here to hardcode for testing
# currently not in use
def get_sizes(self, id, hxs):
showmode = hxs.select('//input[@name="showmode"]/@value').extract()[0]
itemmode = hxs.select('//input[@name="itemmode"]/@value').extract()[0]
salemode = hxs.select('//input[@name="salemode"]/@value').extract()[0]
url = "http://www.lydiasuniforms.com/ajaxed/product-showoptions.asp?sku=%s&opt1=AV&opt2=-1&type2=l1type" % (id)
url += "&type3=&showmode=%s&itemmode=%s&salemode=%s&rnum=429" % (showmode, itemmode, salemode)
jsons = []
print "reading page..."
page = urllib2.urlopen(url).read()
print "page read"
page = page.replace("'", "")
page = page.replace("[", ",")
page = page.replace(",,", "")
temp = page.split("]")
for i in range(0, len(temp) - 2):
tmp = temp[i].split(",")
json = '{ "size_short" : " %s ", "size_full" : "%s", "some_number" :\
"%s", "some_id" : "%s" }' % (tmp[0], tmp[1], tmp[2], tmp[3])
json = basic.cdata(json)
jsons.append(json)
return jsons
# function that handles creating subproducts, can be implemented for the usual way product for every combination
# of size and color if needed
def create_subproducts(self, id, color_names, product_image, color_codes, hxs):
item = LydiasItem()
# if no colors for specific product do this part and call to creating size children with empty string instead
# of actual color name
if len(color_names) == 0:
item['master_product_id'] = [id]
item['product_id'] = [id + "_" + "0"]
item['color'] = ["NO_COLOR"]
item['custom_size'] = self.create_sizes_subproducts(id, id + "_" + "0", "", hxs)
self.xml.create_xml(item)
# for handling cases when there are color options for specific product, create child for every color, and call
# for creating size children for every provided color
else:
for i in range(0, len(color_names)):
print "name :" + color_names[i] + " code:" + color_codes[i]
item['master_product_id'] = [id]
item['product_id'] = [id + "_" + str(i)]
item['color'] = [color_names[i]]
item['color_short'] = [color_codes[i]]
item['normal_image_url'] = self.get_server_path([product_image[i]])
item['in_stock'] = ["IN_STOCK"]
item['custom_size'] = self.create_sizes_subproducts(id, id + "_" + str(i), color_codes[i], hxs)
self.xml.create_xml(item)
item.clear()
return 0
# function for creating child products for sizes
# little messy with all the commented lines but those lines can be used if needed to go back to old way with
# child products instead of json
def create_sizes_subproducts(self, main_id, id, color_code, hxs):
print color_code
jsons = []
# if block for cases when color is provided
if color_code != "":
showmode = hxs.select('//input[@name="showmode"]/@value').extract()[0]
itemmode = hxs.select('//input[@name="itemmode"]/@value').extract()[0]
salemode = hxs.select('//input[@name="salemode"]/@value').extract()[0]
url = "http://www.lydiasuniforms.com/ajaxed/product-showoptions.asp?sku=%s&opt1=%s&opt2=-1&type2=l1type&" \
"type3=&showmode=%s&itemmode=%s&salemode=%s&rnum=193" % (main_id, color_code, showmode, itemmode, salemode)
page = urllib2.urlopen(url).read()
page = page.replace("'", "")
page = page.replace("[", ",")
page = page.replace(",,", "")
temp = page.split("]")
for i in range(0, len(temp) - 2):
tmp = temp[i].split(",")
item = {}
# item['master_product_id'] = [id]
item['size_short'] = tmp[0]
item['price_url'] = self.get_size_price(str(main_id), str(color_code), tmp[0])
item['size'] = tmp[1]
# item['product_id'] = [id + "_" + str(i)]
# item['in_stock'] = ["IN_STOCK"]
# xml.create_xml(item)
jsons.append(basic.cdata(simplejson.dumps(item)))
return jsons
# when the color is not provided different block of code cause it's done differently on the page
else:
temp = hxs.select('//div[@class="not_size"]/text()').extract()
for i in range(0, len(temp)):
item = {}
# item['master_product_id'] = [id]
# item['product_id'] = [id + "_" + str(i)]
item['size_short'] = temp[i]
item['price_url'] = self.get_size_price(str(main_id), "", temp[i])
# item['in_stock'] = ["IN_STOCK"]
# xml.create_xml(item)
jsons.append(basic.cdata(simplejson.dumps(item)))
return jsons
# return 0
# function for getting price for combination of every size and color, can return url where the price is, or can
# parse that url to get that actual price but will drastically increase scraping time
def get_size_price(self, id, color, size):
if color != "":
url = "http://www.lydiasuniforms.com/ajaxed/product-showprice.asp?sku=%s %s %s&qty=1&itemmode=" \
"0&showmode=1&rnum=388" % (str(id), str(color), size)
else:
url = "http://www.lydiasuniforms.com/ajaxed/product-showprice.asp?sku=%s %s&qty=1&itemmode=" \
"0&showmode=1&rnum=259" % (id, size)
url = url.replace(" ", "%20")
return url
# just adding part for getting absolute paths for relative paths from page
def absolute_path(self, urls):
new = []
for i in urls:
new.append("http://www.lydiasuniforms.com" + i)
return new
# function used for gettin embroidery information from clients page, was used only once to get it
# cause embroidery is the same for all the products
def get_emb(self, hxs):
emb = hxs.select('//div[@id="emb"]').extract()
lettering_colors = hxs.select('//select[@id="threadcolor"]/option/@value').extract()
urls = []
d = {}
colors = []
for i in range(1, len(lettering_colors)):
d['type'] = "lettering colors"
d['name'] = lettering_colors[i]
url = "http://www.lydiasuniforms.com/images/lydias/threadcolor_"
url += lettering_colors[i].lower().replace(' ', '_') + ".gif"
d['url'] = self.get_server_path_single(url)
urls.append(url)
colors.append(basic.cdata(simplejson.dumps(d)))
lettering = hxs.select('//select[@id="lettering"]/option/@value').extract()
l = {}
letterings = []
for i in range(1, len(lettering)):
l['type'] = "lettering"
l['name'] = lettering[i]
url = "http://www.lydiasuniforms.com/images/lydias/lettering_"
url += lettering[i].lower().replace(' ', '_') + ".gif"
l['url'] = self.get_server_path_single(url)
letterings.append(basic.cdata(simplejson.dumps(l)))
urls.append(url)
logo = hxs.select('//select[@id="logoname"]/option/@value').extract()
logos = {}
log = []
for i in range(1, len(logo)):
logos['type'] = "logo"
logos['name'] = logo[i]
url = "http://www.lydiasuniforms.com/images/logos/"
url += logo[i].lower() + ".jpg"
logos['url'] = self.get_server_path_single(url)
urls.append(url)
log.append(basic.cdata(simplejson.dumps(logos)))
item = LydiasItem()
item['color'] = colors
item['lettering'] = letterings
item['log'] = log
xml.create_xml(item)
xml.write_xml("emb")
return urls
print colors, letterings, log
os._exit(0)
def handle_not_provided(self):
item = LydiasItem()
for n in self.no_urls['product_ids']:
item['product_id'] = [n]
index = self.no_urls['product_ids'].index(n)
item['name'] = [self.no_urls['names'][index]]
item['in_stock'] = ['NOT_AVAILABLE']
self.xml.create_xml(item)
def spider_closed(self, spider):
"""Handles spider_closed signal from end of scraping.
Handles usual end operations for scraper like writing xml, exporting
to database and sending appropriate mail message."""
msg = ""
if self.counter < self.total:
msg += "\nScraper didn't go through all products, please report"
msg += "\n\nScraped %d product out of %d\n\n" % (self.counter, self.total)
# filename for writing xml
if self.d['database']:
try:
self.database.connect()
filename = self.database.get_name(self.d['catalog_id'])
self.database.update_db(self.products)
self.database.disconnect()
msg += "\nRan from interface.\n"
except:
msg += "\nUpdating database failed, please report."
else:
msg += "\nRan from console.\n"
filename = self.d['file']
self.xml.write_xml(self.name, filename)
msg += self.exc.create_message(self.counter)
#if self.d['upload']:
#exp = CommonExport()
#try:
#exp.xml_to_db(self.name, filename, "4b0d6b52-7b05-4e54-9d87-dfe77ac270c9")
#msg += "\n\nExport to database successful"
#except StandardError:
#msg += "\n\nExport to database failed"
#else:
#msg += "\n\nUpload to database not selected"
## part for exporting to database here
from modules.mail import Mail
mail = Mail()
try:
mail.send_mail(msg, "Lydias: {0}".format(filename))
except:
msg += "\nSending mail failed."
if self.d['database']:
path = "logs/{0}".format(self.name)
if not os.path.exists(path):
os.makedirs(path)
with open("{0}/{1}".format(path, filename), 'w') as f:
f.write(msg)
def get_lists_from_excel(self):
xls = DictExcel(basic.get_excel_path(self.name, self.d['file']))
self.products = dict()
try:
self.products['urls'] = xls.read_excel_collumn_for_urls(3, 15)
self.products['product_ids'] = xls.read_excel_collumn_for_ids(1, 15)
self.products['names'] = xls.read_excel_collumn(2, 15)
except IOError as e:
msg = "I/O error {0}: {1}".format(e.errno, e.strerror)
msg += "\nError occurred for given file: {0}".format(self.d['file'])
self.exc.code_handler(103, msg=msg)
except StandardError:
msg = "Error reading excel file"
msg += "\nError occurred for given file: {0}".format(self.d['file'])
self.exc.code_handler(103, msg=msg)
else:
self.products = xls.delete_duplicates_dict(self.products)
self.products, self.no_urls = xls.separate_no_urls(self.products)
self.products = xls._add_none_status(self.products)
self.no_urls = xls._add_none_status(self.no_urls)
|
[
"mjevtic@extensionengine.com"
] |
mjevtic@extensionengine.com
|
1e47e3d1f3d6edfccee973842260a0ec8c1f4e93
|
3e3741d9ea06f1dcd560e27145256bd3177bed14
|
/04_爬虫/week2/day05/02useragent.py
|
65cea4e5ead7b25600a44ab4929ef7260b41121d
|
[] |
no_license
|
Lousm/Python
|
778bc730db09ab135bf53c7b62af29df2407199a
|
d3f19600012b3576cd5d58df510c17590fcaec14
|
refs/heads/master
| 2020-03-26T16:40:01.188306
| 2018-11-06T03:56:20
| 2018-11-06T03:56:20
| 145,116,187
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 103
|
py
|
# Demo of the third-party fake_useragent library.
from fake_useragent import UserAgent
ua = UserAgent()
# Each attribute access below returns a randomly chosen user-agent
# string for that browser family.
print(ua.ie)
print(ua.chrome)
# NOTE(review): 'google' presumably aliases a Chrome-family UA in
# fake_useragent — confirm against the library's docs.
print(ua.google)
|
[
"mr_lousm@163.com"
] |
mr_lousm@163.com
|
ea6f1fcff5a5681c6f1b5caf3bb61ce8170f5177
|
4e21181a535165d85d63aa9878583f1365143a49
|
/parser.py
|
be8b1558f444ec2b66bff4a9d26820e2063cb6a2
|
[] |
no_license
|
redbassett/Jump
|
6c649d32f8785b92a88e06692718dd79fde74407
|
482a054b1b9606ba770217fb1a714b6dabb66bab
|
refs/heads/master
| 2021-01-13T02:18:19.568135
| 2012-04-30T14:34:02
| 2012-04-30T14:34:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 601
|
py
|
#!/usr/bin/env python
"""Parse a .lvl level file into block and lethal tile coordinates.

Fixes the Python-2-only ``print x`` statements and wraps the script
body in a ``__main__`` guard so the parsing logic is importable.
"""
import os

ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_DIR = os.path.join(ROOT_DIR, "data")


def parse_level(rows, step=20):
    """Map level rows to pixel coordinates.

    '.' marks a solid block and '!' a lethal tile; every character
    advances *step* pixels horizontally and every row *step* pixels
    vertically (the original hard-coded 20 is kept as the default).

    Returns ``(blocks, lethals)`` as lists of ``(x, y)`` tuples.
    """
    blocks = []
    lethals = []
    for row_num, row in enumerate(rows):
        for col_num, c in enumerate(row):
            if c == '.':
                blocks.append((col_num * step, row_num * step))
            elif c == '!':
                lethals.append((col_num * step, row_num * step))
    return blocks, lethals


def main():
    """Load the hard-coded 'slicelevel' level and print its geometry."""
    name = 'slicelevel'
    path = os.path.join(DATA_DIR, name) + '.lvl'
    with open(path, 'r') as f:
        rows = f.read().strip().split('\n')
    blocks, lethals = parse_level(rows)
    print(blocks)
    print(lethals)


if __name__ == "__main__":
    main()
|
[
"redbassett@gmail.com"
] |
redbassett@gmail.com
|
3bebefafd93efc9e06e2a2522f2a259352ae32b7
|
1110dc34c9d7e68ff60fc4d2b35c34c7f09bb015
|
/utils.py
|
46e586aa50323d85aba1074eba6914bf06df63f7
|
[
"MIT"
] |
permissive
|
rienafairefr/light-automation
|
52ad88786471062639e3a6e31808fefdf61bc2fe
|
d782523db38d5f1f5832626aa5352c5ad7cae36e
|
refs/heads/master
| 2021-05-08T09:17:15.749340
| 2017-10-18T13:52:21
| 2017-10-18T13:52:21
| 107,105,705
| 0
| 0
| null | 2017-10-16T09:21:05
| 2017-10-16T09:21:05
| null |
UTF-8
|
Python
| false
| false
| 283
|
py
|
from random import randint
def random6():
    """Return a list of 23 register values with exactly six distinct,
    randomly chosen registers set to 100 and the rest left at 0.

    The Python-2-only ``print val`` debug statement (a SyntaxError under
    Python 3) has been removed; behavior is otherwise unchanged.
    """
    regs_values = [0] * 23
    indices = []
    # Keep drawing until six unique register indices have been collected.
    while len(indices) < 6:
        val = randint(0, 22)
        if val not in indices:
            indices.append(val)
            regs_values[val] = 100
    return regs_values
|
[
"(none)"
] |
(none)
|
44f97dd07f1884ec7681c265df7b56cb53d814f1
|
5e11802ab90a382a711845951662f4f3df112bf1
|
/mc/cooperatives/migrations/0005_auto_20140930_1939.py
|
540d073303c3ff19a300c22d56853c5941138ed5
|
[] |
no_license
|
banquito/dj_mundo_cooperativo
|
464327c95b2875da761456c5a4047ad7ba81e117
|
774557e6ea7a2f433c9b10440c77e12c910d6493
|
refs/heads/master
| 2016-09-05T13:57:23.712432
| 2015-06-08T21:14:58
| 2015-06-08T21:14:58
| 24,514,517
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,403
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cooperatives', '0004_auto_20140930_1832'),
]
operations = [
migrations.CreateModel(
name='Assembly',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('fecha_de_convocatoria', models.DateTimeField(verbose_name=b'date')),
('notificacion', models.BooleanField()),
('direccion', models.CharField(max_length=200)),
('localidad', models.CharField(max_length=200)),
('codigo_postal', models.CharField(max_length=200)),
('fecha_inicio_asamblea', models.DateTimeField(verbose_name=b'date')),
('fecha_fin_asamblea', models.DateTimeField(verbose_name=b'date')),
('fecha_inicio_consejo', models.DateTimeField(verbose_name=b'date')),
('fecha_fin_consejo', models.DateTimeField(verbose_name=b'date')),
('cooperative', models.ForeignKey(to='cooperatives.Cooperative')),
('miembros_consejo_1', models.OneToOneField(related_name=b'consejo_1', to='cooperatives.Partner')),
('miembros_consejo_2', models.OneToOneField(related_name=b'consejo_2', to='cooperatives.Partner')),
('miembros_consejo_3', models.OneToOneField(related_name=b'consejo_3', to='cooperatives.Partner')),
('miembros_consejo_presidente', models.OneToOneField(related_name=b'consejo_presidente', to='cooperatives.Partner')),
('miembros_consejo_secretario', models.OneToOneField(related_name=b'consejo_secretario', to='cooperatives.Partner')),
('miembros_consejo_tesorero', models.OneToOneField(related_name=b'consejo_tesorero', to='cooperatives.Partner')),
('precide_asamblea', models.OneToOneField(related_name=b'precide', to='cooperatives.Partner')),
('sindico_suplente', models.OneToOneField(related_name=b'suplente', to='cooperatives.Partner')),
('sindico_titular', models.OneToOneField(related_name=b'sindico', to='cooperatives.Partner')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='cooperative',
name='capital_banco',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='capital_direccion',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='capital_fecha',
field=models.DateTimeField(null=True, verbose_name=b'date', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='capital_monto',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='capital_numero',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='cuit',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='expediente',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='matricula',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='cooperative',
name='presentacion',
field=models.DateTimeField(null=True, verbose_name=b'date', blank=True),
preserve_default=True,
),
]
|
[
"tiko2015@gmail.com"
] |
tiko2015@gmail.com
|
fcb11cc2d7e9117ffe7eb3f979746f69470c5c21
|
ee00f05260720d38fe2b5942fec050299d7e7409
|
/app/__init__.py
|
22158ee41e93cd0747ce31131003612b54bf2ce0
|
[] |
no_license
|
atjessehill/Assignment-5
|
be7c55cd0d24d473b2211bc043a45783c75c9f22
|
0f1a65fb9dda5b5b749ab2ee77d3874f3d6cf435
|
refs/heads/master
| 2020-03-22T14:50:16.772752
| 2018-07-09T02:27:29
| 2018-07-09T02:27:29
| 140,209,515
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 754
|
py
|
from flask import Flask, jsonify
import json
data = {"1": "Jesse", "2": "Jonathan", "3": "Luisa", "4": "Ana Maria", "5": "James"}
app = Flask(__name__)
def get_all_customers(store=None):
    """Return the id -> name customer mapping.

    Defaults to the module-level ``data`` dict; *store* lets callers
    (and tests) inject an alternative mapping without touching globals.
    """
    return data if store is None else store


def get_customer_by_id(id, store=None):
    """Return the customer name for *id*, or a not-found message.

    Replaces the original linear scan over ``data.items()`` (which also
    left debug ``print`` calls behind) with a direct dict lookup; the
    result for existing callers is unchanged.
    """
    customers = data if store is None else store
    return customers.get(id, "No User with that ID")
@app.route("/")
def hello_world():
return "Hello, World!"
@app.route('/customers', methods=['GET'])
def return_customers():
d = get_all_customers()
return jsonify(d)
@app.route('/customers/<id>', methods=['GET'])
def return_one_customer(id):
d = get_customer_by_id(id)
return d
if __name__== "__main__":
app.run()
|
[
"j_hill14@u.pacific.edu"
] |
j_hill14@u.pacific.edu
|
260f5017feb5cabc9ecfe4f0f55bf860eda03bef
|
6320f2d56bd8fe12196fb1547f2b3553caaf7483
|
/Prac2_Ejer_6.py
|
962e86f7b37fa5796e6a040e8dbd33d950b5878d
|
[
"MIT"
] |
permissive
|
TomasFisica/Redes_Prac_4
|
8d15bfb620a677ec22fbdfd2da899ebfc197e539
|
fa594f088b2089ef789e014f564548388b4954c4
|
refs/heads/master
| 2022-10-22T13:16:50.083692
| 2020-06-10T18:24:48
| 2020-06-10T18:24:48
| 265,703,928
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,669
|
py
|
import numpy as np
from matplotlib import pyplot as plt
class Neu_tgh():
    """Single neuron whose activation function is the hyperbolic tangent."""

    def __init__(self, num_entrada):
        # Number of real inputs; a bias input is appended automatically,
        # so the weight vector has num_entrada + 1 entries.
        self.num_entr = num_entrada
        self.Pesos = np.random.randn(self.num_entr + 1, 1)
        self.Grad_Local = 0
        self.Entrada_arr = 0
        self.Salida = 0

    def Fordware(self, Entradas):
        """Forward pass: return ``tanh(w . [inputs, 1])`` as a float.

        The input row (with bias) and the pre-activation are cached on
        the instance so the backward pass can reuse them.
        """
        self.Entrada_arr = np.array(Entradas)
        self.Entrada_arr = np.append(self.Entrada_arr, 1)  # bias term
        self.Entrada_arr = self.Entrada_arr.reshape([1, self.num_entr + 1])
        self.Salida = self.Entrada_arr @ self.Pesos
        return float(np.tanh(self.Salida))

    def Glocal(self, grad_ext):
        """Set the local gradient: tanh'(pre-activation) * upstream grad."""
        self.Grad_Local = 1 - float(np.square(np.tanh(self.Salida)))
        self.Grad_Local *= grad_ext

    def Backpropagation(self, grad_ext, lr=1e-1):
        """Backward pass: update the weights and return the gradient
        with respect to the (non-bias) inputs, shape ``(1, num_entr)``.

        *lr* generalizes the previously hard-coded ``1e-1`` learning
        rate; the default preserves the original behavior.
        """
        self.Glocal(grad_ext)
        # Gradient flowing back to the inputs (bias column excluded).
        Back = self.Grad_Local * self.Pesos.T[:, :self.num_entr]
        self.Pesos -= (self.Entrada_arr.T * self.Grad_Local) * lr
        return Back
def Loss_test(N11, N12, N21, N22, N33, xtrain, ytrain):
    """Evaluate the first (2-2-1) architecture on a labelled set.

    Feeds each sample through the two input neurons, the two hidden
    neurons and the output neuron, accumulating a squared error (MSE)
    and counting outputs whose sign matches the target's sign.

    Generalized: iterates over the whole dataset instead of the
    original hard-coded ``range(100)`` / division by 100, and returns
    ``(0.0, 0)`` for an empty dataset instead of crashing.

    Returns (mean squared error, number of correctly signed outputs).
    """
    if not len(xtrain):
        return 0.0, 0
    loss = 0
    acierto = 0
    for entrada, objetivo in zip(xtrain, ytrain):
        capa1 = [N11.Fordware(entrada[0]), N12.Fordware(entrada[1])]
        capa2 = [N21.Fordware(capa1), N22.Fordware(capa1)]
        salida = N33.Fordware(capa2)
        loss += np.square(objetivo - salida)  # MSE accumulator
        # A hit is counted when prediction and target share the same sign.
        if (salida > 0 and objetivo > 0) or (salida < 0 and objetivo < 0):
            acierto += 1
    return float(loss) / len(xtrain), acierto
def Loss_test2(N211, N212, N222, N233, xtrain, ytrain):
    """Evaluate the second (skip-connection) architecture.

    Bug fixes versus the original:
    - it referenced the *global* ``N2_11`` instead of the ``N211``
      parameter, silently ignoring the first argument;
    - it overwrote the loss each iteration (``loss =``) instead of
      accumulating it (``loss +=``), so only the last sample's error
      survived.
    Also generalized from the hard-coded ``range(100)`` / division by
    100 to the actual dataset length, with an empty-input guard.

    Returns (mean squared error, number of correctly signed outputs).
    """
    if not len(xtrain):
        return 0.0, 0
    loss = 0
    acierto = 0
    for entrada, objetivo in zip(xtrain, ytrain):
        capa1 = [N211.Fordware(entrada[0]), N212.Fordware(entrada[1])]
        capa2 = [N222.Fordware(capa1)]
        # Skip connection: the hidden neuron's output joins the layer-1
        # outputs before reaching the 3-input output neuron.
        capa1.append(capa2[0])
        salida = N233.Fordware(capa1)
        loss += np.square(objetivo - salida)
        if (salida > 0 and objetivo > 0) or (salida < 0 and objetivo < 0):
            acierto += 1
    return float(loss) / len(xtrain), acierto
"""Variables a utilizar"""
x_train=np.random.choice([1,-1],[3000,2])
y_train=np.prod(x_train,axis=1).reshape(3000,1)
x_test=np.random.choice([1,-1],[100,2])
y_test=np.prod(x_test,axis=1).reshape(100,1)
loss=0
loss_test=0
accu_test=0
acc=0
Score_1=0
Score_2=0
Score_3=0
Grad_33=0
Grad_21=0
Grad_22=0
Grad_aux=0
Loss=[]
Losstest=[]
Accu_test=[]
Accu=[]
i=0
"""Ahora defino las neuronas"""
N_11=Neu_tgh(1)
N_12=Neu_tgh(1)
N_21=Neu_tgh(2)
N_22=Neu_tgh(2)
N_33=Neu_tgh(2)
"""Ahora realizo el entrenamiento"""
for j in range(30):
for k in range(100):
Score_1=[N_11.Fordware(x_train[i][0]),N_12.Fordware(x_train[i][1])]
Score_2=[N_21.Fordware(Score_1),N_22.Fordware(Score_1)]
Score_3=N_33.Fordware(Score_2)
"""Determino la loss del entrenamiento"""
loss=np.square(y_train[i]-Score_3) #Loss de MSE
"""Determino el gradiente"""
Grad_33=N_33.Backpropagation((y_train[i]-Score_3)*(-2))
Grad_21=N_21.Backpropagation(Grad_33[0][0])
Grad_22=N_22.Backpropagation(Grad_33[0][1])
N_11.Backpropagation(Grad_21[0][0]+Grad_22[0][0])
N_12.Backpropagation(Grad_21[0][1]+Grad_22[0][1])
"""Determino el accu"""
if (Score_3>0 and y_train[i]>0)or (Score_3<0 and y_train[i]<0):
acc+=1
i+=1
loss_test,accu_test= Loss_test(N_11,N_12,N_21,N_22,N_33,x_test,y_test)
Losstest.append(loss_test)
Accu_test.append(accu_test)
Loss.append(float(loss)/100)
loss=0
Accu.append(acc)
acc=0
"""Grafico los resultados"""
"""Grafico la loss"""
f,(ax1)=plt.subplots(1)
ax1.plot(range(30),Loss,color="K",label="Loss train");ax1.plot(range(30),Losstest,color="r",label="Loss teste")
plt.legend()
"""Grafico la acc"""
f,(ax2)=plt.subplots(1)
ax2.plot(range(30),Accu,color="K",label="Acc train");ax2.plot(range(30),Accu_test,color="r",label="Acc teste")
plt.legend()
"""Ahora realizo el ejercicio con la segunda arquitectura"""
N2_11=Neu_tgh(1)
N2_12=Neu_tgh(1)
N2_22=Neu_tgh(2)
N2_33=Neu_tgh(3)
Loss2=[]
Losstest2=[]
Accu_test2=[]
Accu2=[]
i=0
"""Ahora realizo el entrenamiento"""
for j in range(30):
for k in range(100):
Score_1=[N2_11.Fordware(x_train[i][0]),N2_12.Fordware(x_train[i][1])]
Score_2=[N2_22.Fordware(Score_1)]
Score_1.append(Score_2[0])
Score_3=N2_33.Fordware(Score_1)
"""Determino la loss del entrenamiento"""
loss=np.square(y_train[i]-Score_3) #Loss de MSE
"""Determino el gradiente"""
Grad2_33=N2_33.Backpropagation((y_train[i]-Score_3)*(-2))
Grad2_22=N2_22.Backpropagation(Grad2_33[0][2])
N2_11.Backpropagation(Grad2_22[0][0]+Grad2_33[0][0])
N2_12.Backpropagation(Grad2_22[0][1]+Grad2_33[0][1])
"""Determino el accu"""
if (Score_3>0 and y_train[i]>0)or (Score_3<0 and y_train[i]<0):
acc+=1
i+=1
loss_test,accu_test= Loss_test2(N2_11,N2_12,N2_22,N2_33,x_test,y_test)
Losstest2.append(loss_test)
Accu_test2.append(accu_test)
Loss2.append(float(loss)/100)
loss=0
Accu2.append(acc)
acc=0
"""Grafico los resultados"""
"""Grafico la loss"""
f,(ax3)=plt.subplots(1)
ax3.plot(range(30),Loss2,color="K",label="Loss train 2");ax3.plot(range(30),Losstest2,color="r",label="Loss teste 2")
plt.legend()
"""Grafico la acc"""
f,(ax4)=plt.subplots(1)
ax4.plot(range(30),Accu2,color="K",label="Acc train 2");ax4.plot(range(30),Accu_test2,color="r",label="Acc teste 2")
plt.legend()
|
[
"tomas.garcia.fisica@gmail.com"
] |
tomas.garcia.fisica@gmail.com
|
a12499d02081c3a097095e5f9b609a3d9730a9a4
|
d9fb5fa6b69d8ad0e9608c98903ac83e84da25b3
|
/live_iniciando_django/urls.py
|
3c93ea96cc3475389e75545875aaaf0c0f304fc4
|
[] |
no_license
|
schoolofnetcom/live-iniciando-django
|
bf80b71a868e1aba92b55e27c42672db68b4a26f
|
a991d6e9ec45421d393ca9d3b0cca86140b2537d
|
refs/heads/master
| 2021-04-09T13:03:29.591181
| 2018-03-16T03:34:44
| 2018-03-16T03:34:44
| 125,459,068
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 831
|
py
|
"""live_iniciando_django URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from my_app import views
# URL routing table: maps request paths to view callables.
urlpatterns = [
    path('admin/', admin.site.urls),  # built-in Django admin site
    path('products/',views.products_list)  # product listing view (my_app.views)
]
|
[
"argentinaluiz@gmail.com"
] |
argentinaluiz@gmail.com
|
431cefec3649a3cd0e8cc6955a40346b2f736fec
|
9dcbe30676e2df00b3d7cb308885cb17dda89494
|
/qa/rpc-tests/qtum-condensing-txs.py
|
050b0348d84a904922a29e7b8ea59a5ef75a8368
|
[] |
no_license
|
bkartel1/chat_wallet
|
9d901067b4bf140c20e82a0ff3e42cddfc1706b3
|
d8919bfd4bae461992ff84dc8a66ea01d78a7c5b
|
refs/heads/master
| 2020-03-16T20:17:48.112412
| 2018-04-25T13:43:41
| 2018-04-25T13:43:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,384
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.openchat import *
import sys
class CondensingTxsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [['-txindex=1', '-rpcmaxgasprice=10000000']])
self.node = self.nodes[0]
self.is_network_split = False
# verify that the state hash is not 0 on genesis
def setup_contracts(self):
"""
pragma solidity ^0.4.0;
contract Sender1 {
// Sender2 sender2;
// Sender3 sender3;
address public sender2;
address public sender3;
function Sender1() {
}
function setSenders(address senderx, address sendery) public{
// sender2=Sender2(senderx);
// sender3=Sender3(sendery);
sender2 = senderx;
sender3 = sendery;
}
function share() public payable{
if(msg.sender != address(sender3)){
// sender2.share.value(msg.value/2);
sender2.call.value(msg.value/2)(bytes4(sha3("share()")));
}
}
function sendAll() public payable{
// sender2.keep.value(msg.value + this.balance);
// sender2.call.value(msg.value + this.balance)(bytes4(sha3("keep()")));
sender2.call.value(this.balance)(bytes4(sha3("keep()")));
}
function keep() public payable{
}
function() payable { } //always payable
}
contract Sender2{
// Sender1 sender1;
// Sender3 sender3;
address public sender1;
address public sender3;
function Sender2() {
}
function setSenders(address senderx, address sendery) public{
// sender1=Sender1(senderx);
// sender3=Sender3(sendery);
sender1 = senderx;
sender3 = sendery;
}
function share() public payable{
// sender3.share.value(msg.value/2);
sender3.call.value(msg.value/2)(bytes4(sha3("share()")));
}
function keep() public payable{
}
function withdrawAll() public{
// sender3.withdraw();
sender3.call(bytes4(sha3("withdraw()")));
msg.sender.send(this.balance);
}
function() payable { } //always payable
}
contract Sender3 {
// Sender1 sender1;
// Sender2 sender2;
address public sender1;
address public sender2;
function Sender3() {
}
function setSenders(address senderx, address sendery) public{
// sender1=Sender1(senderx);
// sender2=Sender2(sendery);
sender1 = senderx;
sender2 = sendery;
}
function share() public payable{
// sender1.share.value(msg.value/2);
// sender2.keep.value(msg.value/4);
sender1.call.value(msg.value/2)(bytes4(sha3("share()")));
sender2.call.value(msg.value/4)(bytes4(sha3("keep()")));
}
function withdraw() public{
msg.sender.send(this.balance);
}
function() payable { } //always payable
}
"""
sender1_bytecode = "6060604052341561000c57fe5b5b5b5b6104cb8061001e6000396000f30060606040523615610076576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680635579818d1461007f578063622836a3146100d45780639b0079d414610126578063a8d5fd6514610178578063e14f680f14610182578063e4d06d821461018c575b61007d5b5b565b005b341561008757fe5b6100d2600480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610196565b005b34156100dc57fe5b6100e461021d565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561012e57fe5b610136610243565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b610180610269565b005b61018a6103a9565b005b61019461049c565b005b81600060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555080600160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055505b5050565b600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff161415156103a657600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1660023481151561030557fe5b0460405180807f7368617265282900000000000000000000000000000000000000000000000000815250600701905060405180910390207c01000000000000000000000000000000000000000000000000000000009004906040518263ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180905060006040518083038185886187965a03f19350505050505b5b565b600060009054906101000a900473ffffffffffffffff
ffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163073ffffffffffffffffffffffffffffffffffffffff163160405180807f6b65657028290000000000000000000000000000000000000000000000000000815250600601905060405180910390207c01000000000000000000000000000000000000000000000000000000009004906040518263ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180905060006040518083038185886187965a03f19350505050505b565b5b5600a165627a7a72305820b491c90fc7b4f09ab3f6262b83707908d390a97f9730429d1ff5fa8e44a63b190029"
self.sender1 = self.node.createcontract(sender1_bytecode, 1000000, OPENCHAT_MIN_GAS_PRICE/COIN)['address']
sender2_bytecode = "6060604052341561000c57fe5b5b5b5b6104b28061001e6000396000f30060606040523615610076576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680635579818d1461007f578063853828b6146100d45780639b0079d4146100e6578063a8d5fd6514610138578063e4d06d8214610142578063f34e0e7b1461014c575b61007d5b5b565b005b341561008757fe5b6100d2600480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190803573ffffffffffffffffffffffffffffffffffffffff1690602001909190505061019e565b005b34156100dc57fe5b6100e4610225565b005b34156100ee57fe5b6100f661034f565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b610140610375565b005b61014a61045d565b005b341561015457fe5b61015c610460565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b81600060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555080600160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055505b5050565b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1660405180807f7769746864726177282900000000000000000000000000000000000000000000815250600a01905060405180910390207c010000000000000000000000000000000000000000000000000000000090046040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180905060006040518083038160008761646e5a03f192505050503373ffffffffffffffffffffffffffffffffffffffff166108fc3073ffffffffffffffffffffffffffffffffffffffff16319081150290604051809050600060405180830381858888f19350505050505b565b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff166002348115156103ba57fe5b0460405180
807f7368617265282900000000000000000000000000000000000000000000000000815250600701905060405180910390207c01000000000000000000000000000000000000000000000000000000009004906040518263ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180905060006040518083038185886187965a03f19350505050505b565b5b565b600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16815600a165627a7a723058201842d5027fea2d624a38de6731e71832836efe8c51e5815b8ad85b7f3639e72a0029"
self.sender2 = self.node.createcontract(sender2_bytecode, 1000000, OPENCHAT_MIN_GAS_PRICE/COIN)['address']
sender3_bytecode = "6060604052341561000c57fe5b5b5b5b6104a88061001e6000396000f3006060604052361561006b576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680633ccfd60b146100745780635579818d14610086578063622836a3146100db578063a8d5fd651461012d578063f34e0e7b14610137575b6100725b5b565b005b341561007c57fe5b610084610189565b005b341561008e57fe5b6100d9600480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190803573ffffffffffffffffffffffffffffffffffffffff169060200190919050506101dc565b005b34156100e357fe5b6100eb610263565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b610135610289565b005b341561013f57fe5b610147610456565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b3373ffffffffffffffffffffffffffffffffffffffff166108fc3073ffffffffffffffffffffffffffffffffffffffff16319081150290604051809050600060405180830381858888f19350505050505b565b81600060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555080600160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055505b5050565b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff166002348115156102ce57fe5b0460405180807f7368617265282900000000000000000000000000000000000000000000000000815250600701905060405180910390207c01000000000000000000000000000000000000000000000000000000009004906040518263ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180905060006040518083038185886187965a03f1935050505050600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff166004348115156103b357fe5b0460405180807f6b65657028
290000000000000000000000000000000000000000000000000000815250600601905060405180910390207c01000000000000000000000000000000000000000000000000000000009004906040518263ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180905060006040518083038185886187965a03f19350505050505b565b600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16815600a165627a7a72305820cb1b06b481990e1e218f7d0b51a3ffdf5b7439cfdd9bb2dccc1476cb84dfc95b0029"
self.sender3 = self.node.createcontract(sender3_bytecode, 1000000, OPENCHAT_MIN_GAS_PRICE/COIN)['address']
self.node.generate(1)
assert(len(self.node.listcontracts()) == 3+NUM_DEFAULT_DGP_CONTRACTS)
self.keep_abi = "e4d06d82"
self.sendAll_abi = "e14f680f"
self.setSenders_abi = "5579818d"
self.share_abi = "a8d5fd65"
self.withdrawAll_abi = "853828b6"
self.withdraw_abi = "3ccfd60b"
self.sender1_abi = "f34e0e7b"
self.sender2_abi = "622836a3"
self.sender3_abi = "9b0079d4"
padded_sender1 = self.sender1.zfill(64)
padded_sender2 = self.sender2.zfill(64)
padded_sender3 = self.sender3.zfill(64)
self.node.sendtocontract(self.sender1, self.setSenders_abi + padded_sender2 + padded_sender3)
self.node.sendtocontract(self.sender2, self.setSenders_abi + padded_sender1 + padded_sender3)
self.node.sendtocontract(self.sender3, self.setSenders_abi + padded_sender1 + padded_sender2)
self.node.generate(1)
# Verify that the senders have been set correctly
assert_equal(self.node.callcontract(self.sender1, self.sender2_abi)['executionResult']['output'][24:], self.sender2)
assert_equal(self.node.callcontract(self.sender1, self.sender3_abi)['executionResult']['output'][24:], self.sender3)
assert_equal(self.node.callcontract(self.sender2, self.sender1_abi)['executionResult']['output'][24:], self.sender1)
assert_equal(self.node.callcontract(self.sender2, self.sender3_abi)['executionResult']['output'][24:], self.sender3)
assert_equal(self.node.callcontract(self.sender3, self.sender1_abi)['executionResult']['output'][24:], self.sender1)
assert_equal(self.node.callcontract(self.sender3, self.sender2_abi)['executionResult']['output'][24:], self.sender2)
def run_test(self):
self.node.generate(COINBASE_MATURITY+50)
print("Setting up contracts and calling setSenders")
self.setup_contracts()
A1 = self.node.getnewaddress()
self.node.sendtoaddress(A1, 1)
self.node.generate(1)
assert("vin" not in self.node.getaccountinfo(self.sender1))
assert("vin" not in self.node.getaccountinfo(self.sender2))
assert("vin" not in self.node.getaccountinfo(self.sender3))
T1_id = self.node.sendtocontract(self.sender1, self.share_abi, 8)['txid']
B2_id = self.node.generate(1)[0]
B2 = self.node.getblock(B2_id)
# Since this is a ṔoW block we only require 3 txs atm (coinbase, T1 and COND tx)
assert_equal(B2['tx'][1], T1_id)
assert_equal(len(B2['tx']), 3)
C1_id = B2['tx'][2]
C1 = self.node.getrawtransaction(C1_id, True)
assert_vin(C1, [('OP_SPEND', )])
assert_vout(C1, [(5, 'call'), (2.5, 'call'), (0.5, 'call')])
assert("vin" in self.node.getaccountinfo(self.sender1))
assert("vin" in self.node.getaccountinfo(self.sender2))
assert("vin" in self.node.getaccountinfo(self.sender3))
# We set the tx fee of T2 to a higher value such that it will be prioritized (be at index 1 in the block)
T2_id = self.node.sendtocontract(self.sender1, self.keep_abi, 2, 50000, 0.0001)['txid']
T3_id = self.node.sendtocontract(self.sender1, self.sendAll_abi, 2)['txid']
B3_id = self.node.generate(1)[0]
B3 = self.node.getblock(B3_id)
# coinbase, T2, C2, T3, C3
assert_equal(len(B3['tx']), 5)
assert_equal(B3['tx'][1], T2_id)
C2_id = B3['tx'][2]
C3_id = B3['tx'][4]
C2 = self.node.getrawtransaction(C2_id, True)
C3 = self.node.getrawtransaction(C3_id, True)
assert_vin(C2, [('OP_SPEND', ), ('OP_SPEND', )])
assert_vout(C2, [(7, 'call')])
assert_vin(C3, [('OP_SPEND', ), ('OP_SPEND', ), ('OP_SPEND', )])
assert_vout(C3, [(11.5, 'call')])
assert("vin" not in self.node.getaccountinfo(self.sender1))
assert("vin" in self.node.getaccountinfo(self.sender2))
assert("vin" in self.node.getaccountinfo(self.sender3))
# We need the txfee to be higher than T5 so that T4 tx is prioritized over T5.
# We set the gas such that the the tx will run but not immediately throw a out of gas exception
T4_raw = make_transaction(self.node, [make_vin(self.node, 3*COIN)], [make_op_call_output(2*COIN, b"\x04", 22000, CScriptNum(OPENCHAT_MIN_GAS_PRICE), hex_str_to_bytes(self.share_abi), hex_str_to_bytes(self.sender2))])
T4_id = self.node.sendrawtransaction(T4_raw)
T5_id = self.node.sendtocontract(self.sender2, self.withdrawAll_abi, 0, 1000000, OPENCHAT_MIN_GAS_PRICE/COIN, A1)['txid']
B4_id = self.node.generate(1)[0]
B4 = self.node.getblock(B4_id)
# Coinbase, T4, R1, T5, C4
assert_equal(len(B4['tx']), 5)
assert_equal(B4['tx'][1], T4_id)
assert_equal(B4['tx'][3], T5_id)
R1_id = B4['tx'][2]
R1 = self.node.getrawtransaction(R1_id, True)
C4_id = B4['tx'][4]
C4 = self.node.getrawtransaction(C4_id, True)
assert_vout(R1, [(2, 'pubkeyhash')])
assert_vin(C4, [('OP_SPEND', ), ('OP_SPEND', )])
assert_vout(C4, [(12, 'pubkeyhash')])
assert_equal(sum(self.node.listcontracts().values()), 0)
assert("vin" not in self.node.getaccountinfo(self.sender1))
assert("vin" not in self.node.getaccountinfo(self.sender2))
assert("vin" not in self.node.getaccountinfo(self.sender3))
if __name__ == '__main__':
CondensingTxsTest().main()
|
[
"eug.ray@hotmail.com"
] |
eug.ray@hotmail.com
|
8984d7b687d64f3221ccef0626348690949d4c6e
|
6346faf720a7bcedcb6b87c63291b1249df1cbcc
|
/django_dropbox_csv_export/integrations/tests/test_models.py
|
5e7a87265dd1852e9997b70fd6864436f3a9ea04
|
[
"MIT"
] |
permissive
|
zkan/django-dropbox-csv-export
|
d0e50fa589b1681386b64a105200cb0f11e6ab30
|
5e77c539d84acf59d6f1dc1ffe3515b13fc34565
|
refs/heads/master
| 2021-07-18T01:19:24.355752
| 2017-10-25T13:18:34
| 2017-10-25T13:18:34
| 108,135,866
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from ..models import Integration
class IntegrationTest(TestCase):
def test_save_integration(self):
User = get_user_model()
user = User.objects.create(
username='kan',
password='12345',
email='kan@pronto.com'
)
integration = Integration()
integration.user = user
integration.access_token = 'abc'
integration.save()
integration = Integration.objects.last()
self.assertEqual(integration.user, user)
self.assertEqual(integration.access_token, 'abc')
|
[
"kan@prontomarketing.com"
] |
kan@prontomarketing.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.