blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c12de7349ec39732017cff9b64ee3015361295e3 | 2a160a8491bccee82c05cd878b11c564b8471583 | /pull_stream/pull_live_stream.py | 9967a0f2937867c9b04c8045606ded1f0b61f33e | [] | no_license | ETalienwx/Bwork_code | 7eba821bd060fe7005d2bd35a5d6490745ec7ed8 | ac557887a23e12f7fd1c9ef6f094e6ee66780902 | refs/heads/master | 2023-02-11T10:19:15.625806 | 2021-01-11T06:16:25 | 2021-01-11T06:16:25 | 319,890,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,382 | py | # 拉取直播流
import subprocess
import requests
import time
import os
from enum import Enum
class BaseInfo(object):
    """Base class giving simple value objects dict <-> attribute conversion."""

    def from_dict(self, meta: dict):
        """Populate existing attributes from *meta*; keys absent from *meta* become None.

        Only attributes already initialised on the instance are filled in;
        extra keys in *meta* are ignored.
        """
        # Snapshot the attribute names so setattr below cannot interfere
        # with the iteration.
        for k in list(self.__dict__):
            setattr(self, k, meta.get(k, None))

    def to_dict(self) -> dict:
        """Return the instance attributes as a plain dict.

        Enum members are converted to their underlying ``.value`` so the
        result is JSON-friendly.
        """
        props = {}
        for k, v in self.__dict__.items():
            # isinstance covers the whole Enum hierarchy; the previous
            # `v.__class__.__base__ is Enum` check failed for members of
            # e.g. IntEnum subclasses, whose first base is `int`.
            props[k] = v.value if isinstance(v, Enum) else v
        return props
class SrtResult(BaseInfo):
    """Outcome record for one ffprobe invocation.

    Every field is initialised up front so that BaseInfo.from_dict /
    to_dict can see the complete attribute set.
    """

    def __init__(self):
        super().__init__()
        self.command = None   # command line that was executed
        self.stdout = None    # captured standard output (stderr is merged in)
        self.stderr = None    # captured standard error, if any
        self.timeout = None   # timeout (seconds) applied to the run
        self.killed = False   # True when the process was killed on timeout
def download_file(url):
    """Download roughly one second of the live stream at *url* into input.flv.

    Returns 1 when a non-empty input.flv was produced, -1 otherwise.
    """
    print(url)
    headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36"}
    # Remove any stale file from a previous run so a failed request
    # cannot be reported as a success below.
    if os.path.exists("input.flv"):
        os.remove("input.flv")
    try:
        # timeout prevents hanging forever on an unresponsive server.
        response = requests.get(url, stream=True, headers=headers, timeout=10)
        try:
            with open("input.flv", "wb") as flv:
                start = time.time()
                for chunk in response.iter_content(chunk_size=1024):
                    if chunk:
                        flv.write(chunk)
                    # Stop after ~1 second of wall-clock time: we only need
                    # the beginning of the stream to probe its packets.
                    if (time.time() - start) > 1:
                        break
        finally:
            # Explicitly release the streaming connection.
            response.close()
    except Exception as error:
        print("Request stream url error!", error)
    # Guard with exists(): if the request failed before the file was
    # created, os.path.getsize would raise FileNotFoundError.
    if os.path.exists("input.flv") and os.path.getsize("input.flv") > 0:
        print("download file success!")
        return 1
    else:
        print("download file error!")
        return -1
def pull_first_packet():
    """Run ffprobe on input.flv and print the first [PACKET] block it reports.

    The ffprobe run is capped at 2 seconds; on timeout the partial output
    captured so far is used instead.
    """
    command = "ffprobe -show_packets input.flv"
    args = dict(
        shell=True,
        stderr=subprocess.STDOUT,
        encoding='utf-8',
        timeout=2,
    )
    result = SrtResult()
    result.command = command
    result.timeout = 2
    try:
        result.stdout = subprocess.check_output(command, **args)
    except subprocess.TimeoutExpired as exception:
        result.killed = True
        # With encoding='utf-8' check_output runs in text mode, but some
        # Python versions attach raw bytes to TimeoutExpired; the previous
        # unconditional .decode() crashed when the payload was already str.
        if exception.stdout is not None:
            out = exception.stdout
            result.stdout = out.decode('utf-8') if isinstance(out, bytes) else out
        if exception.stderr is not None:
            err = exception.stderr
            result.stderr = err.decode('utf-8') if isinstance(err, bytes) else err
    # Guard against stdout being None (timeout with no captured output),
    # which previously raised AttributeError on .find().
    if result.stdout:
        packet_start = result.stdout.find("[PACKET]")
        packet_end = result.stdout.find("[/PACKET]") + len("[/PACKET]")
        result.stdout = result.stdout[packet_start:packet_end]
    print(result.to_dict())
def main():
    """Entry point: grab ~1s of the stream, then probe its first packet."""
    # Known-good stream URL (signed; may expire over time).
    url = "https://d1--cn-gotcha04.bilivideo.com/live-bvc/852011/live_69307_2194433_1500.flv?cdn=cn-gotcha04&expires=1598589796&len=0&oi=3030954244&pt=web&qn=150&trid=7fd77e91987f4e52990406d16ab9243f&sigparams=cdn,expires,len,oi,pt,qn,trid&sign=5e66a350499eefd2bc5c5829f4141706&ptype=0&src=9&sl=2&order=1&platform=web&pSession=aj9z3Mdy-7iCk-4kXr-BcCx-4H5MYx6Z3hJp"
    # Known-expired stream URL, kept for exercising the failure path:
    # url = "https://d1--cn-gotcha04.bilivideo.com/live-bvc/208258/live_52926766_1129961_1500.flv?cdn=cn-gotcha04&expires=1597217512&len=0&oi=3030954244&pt=web&qn=150&trid=ed819650419545c09b72800ce7548c57&sigparams=cdn,expires,len,oi,pt,qn,trid&sign=5785ccd89c84f4fd9f188ed63474774d&ptype=0&src=9&sl=3&order=1&platform=web&pSession=kKbe92E4-DeC4-4QT2-penA-8mimYiXc3Ktn"
    if download_file(url) == 1:
        pull_first_packet()
    else:
        print("stream expire!")


if __name__ == '__main__':
    main()
| [
"wangxuan02@bilibili.com"
] | wangxuan02@bilibili.com |
9835f1aebb85255ca2146df76cdcb41c6f43a428 | 4d18e2e6abf050e0816024661c8467d0d588adfd | /lib/python2.7/types.py | e467f32986a57b00e083cdebfa84af04c69a89b3 | [] | no_license | jseuribe/Next_Steps | 1fdfbebfbf6087b4297d3d88153d3709cd85a530 | dda5b50770c75c0e863fa61dba23a1ed1b60bf9f | refs/heads/master | 2021-01-21T13:53:24.919527 | 2016-05-25T01:59:15 | 2016-05-25T01:59:16 | 51,565,879 | 2 | 1 | null | 2016-05-20T23:58:22 | 2016-02-12T03:53:48 | HTML | UTF-8 | Python | false | false | 45 | py | /home/joseu/miniconda2/lib/python2.7/types.py | [
"jseuribe@hotmail.com"
] | jseuribe@hotmail.com |
e3578fcf72242dbd6ea5994ec08630bbdbb6b631 | 5185529b885d37bc1a6c7a7de21fd8b6ecfaf11a | /mapas/Hangar.py | 00d012e7a1bb6a6a4532c9c792bc4390dcbf5444 | [] | no_license | igorssmanoel/ContraRM | 020dd8449fc31b72c9fa3e7517a388be721cb5f5 | d28755a3bfb9449529f58fe93f33f827a0da41f2 | refs/heads/master | 2022-05-13T05:42:25.900805 | 2017-07-13T00:05:41 | 2017-07-13T00:05:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,959 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pygame
from Inimigo import *
from Melhoria import *
from Parede import *
from Chao import *
class Hangar:
    """Static definition of the "Hangar" level: background assets, floor and
    wall geometry, power-up drops and enemy placement (pixel coordinates)."""
    FaseCaminho = "mapas/Hangar.png"  # level background image
    FaseLargura = 2444  # level width in pixels
    FaseMusic = "sons/Hangar.mp3"  # level background music
    # Floors of the map.
    def Coloca_Chao(self, chao):
        """Clear *chao* and fill it with this level's floor segments."""
        # Position | Length
        chao.empty()
        chao.add(Chao([0,310], 512))
        chao.add(Chao([550,247], 410))
        chao.add(Chao([520,440], 475))
        chao.add(Chao([1000,375], 215))
        chao.add(Chao([1258,310], 160))
        chao.add(Chao([1227,440], 420))
        chao.add(Chao([1450,247], 160))
        chao.add(Chao([1643,376], 245))
        chao.add(Chao([1900,310], 540))
    # Walls of the map.
    def Coloca_Paredes(self, paredes):
        """Clear *paredes* and fill it with this level's wall segments."""
        paredes.empty()
        paredes.add(Parede([514,437], 120))
        paredes.add(Parede([999,437], 60))
        paredes.add(Parede([1218,437], 60))
    # Power-up list:
    # L - Extra life
    # R - Shoot 2x faster
    # M - Shots 2x stronger
    def Coloca_Melhorias(self, melhorias):
        """Clear *melhorias* and fill it with this level's power-ups."""
        # Position | Type
        melhorias.empty()
        melhorias.add(Melhoria([1043,170], "R"))
        melhorias.add(Melhoria([1529,170], "L"))
    # Enemy list.
    # About "Ativo" (activation range):
    # This value is the distance between player and enemy expressed as a
    # percentage of the screen. If the enemy is closer than that percentage
    # it enters the active state (starts using its AI matrix).
    # Example: value = 60 -> when the player is closer than 60% of the
    # screen size, the enemy starts shooting.
    # About the enemies' AI matrix:
    # 0 - Direction to shoot. (0-midleft, 1-midright, 2-topright,
    #     3-bottomright, 4-topleft, 5-bottomleft, 6-midtop)
    # 1 - Delay before starting the burst, in milliseconds.
    # 2 - Number of shots in the burst.
    # 3 - Delay between shots within the burst, in milliseconds.
    # 4 - Pixel correction applied to the shot spawn point for the chosen
    #     direction.
    # Entries are consumed left to right and wrap around at the end.
    # Matrix shape: matriz = [[0,1,2,3,4], [0,1,2,3,4], [0,1,2,3,4]]
    #
    # Diagram of the pixel-correction (4) geometry:
    # + 4 -/- 6 +/- 2 +
    # - * * * * * -
    # 0 J 1
    # + * * * * * +
    # + 5 -/- 7 +/- 3 +
    # NOTE(review): direction 7 is used by the boss2 entries below but is
    # absent from the legend above — presumably mid-bottom; confirm in Inimigo.
    def Coloca_Inimigos(self, inimigos):
        """Clear *inimigos*, spawn all enemies and return the final boss."""
        # Position | Sprite | Lives | Scale | Explosion scale | Active (? = 0) | AI matrix (? = None)
        inimigos.empty()
        inimigos.add(Inimigo([552,436], "sprites/Atirador/AtiradorEsquerdaCima.gif", 2, 1.5, 1.5, 60, [[4,1000,2,500, 0], [4,1000,3,500, 0]]))
        inimigos.add(Inimigo([655,240], "sprites/Atirador/AtiradorDeitadoEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,2,500, 0], [0,1000,1,500, 0]]))
        inimigos.add(Inimigo([797,436], "sprites/Atirador/AtiradorEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,3,500, -18], [0,1000,3,1000, -18]]))
        inimigos.add(Inimigo([855,70], "sprites/Torres/TorreBaixoEsquerda.gif", 5, 1.3, 1.3, 90, [[5,700,3,500, 0]]))
        inimigos.add(Inimigo([915,240], "sprites/Atirador/AtiradorEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,3,500, -18], [0,1000,3,1000, -18]]))
        inimigos.add(Inimigo([1014,450], "sprites/Torres/TorreEsquerda.gif", 5, 1.3, 1.3, 90, [[0,700,3,500, 0]]))
        inimigos.add(Inimigo([1145,374], "sprites/Atirador/BigGunMan.png", 4, 1.5, 1.5, 60, [[0,500,2,500, -13], [0,1000,3,500, -13]]))
        inimigos.add(Inimigo([1317,307], "sprites/Atirador/AtiradorEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,3,500, -18], [0,1000,3,1000, -18]]))
        inimigos.add(Inimigo([1432,436], "sprites/Atirador/AtiradorEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,3,500, -18], [0,1000,3,1000, -18]]))
        inimigos.add(Inimigo([1449,290], "sprites/Torres/TorreEsquerda.gif", 5, 1.5, 1.5, 60, [[0,1000,2,500, 0], [0,1000,3,500, 0]]))
        inimigos.add(Inimigo([1522,240], "sprites/Atirador/AtiradorDeitadoEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,2,500, 0], [0,1000,1,500, 0]]))
        inimigos.add(Inimigo([1740,462], "sprites/Torres/TorreCimaEsquerda.gif", 5, 1.3, 1.3, 90, [[4,700,3,500, 0]]))
        inimigos.add(Inimigo([1781,374], "sprites/Atirador/BigGunMan.png", 4, 1.5, 1.5, 60, [[0,500,2,500, -13], [0,1000,3,500, -13]]))
        inimigos.add(Inimigo([1992,306], "sprites/Paredes/Parede1.gif", 5, 1.5, 2))
        inimigos.add(Inimigo([2024,170], "sprites/Bosses/boss2.gif", 6, 0.5, 3.5, 90, [[7, 500, 4, 500, 0], [7, 500, 5, 500, 0]]))
        inimigos.add(Inimigo([2124,170], "sprites/Bosses/boss2.gif", 6, 0.5, 3.5, 90, [[7, 500, 4, 500, 0], [7, 500, 5, 500, 0]]))
        inimigos.add(Inimigo([2224,170], "sprites/Bosses/boss2.gif", 6, 0.5, 3.5, 90, [[7, 500, 4, 500, 0], [7, 500, 5, 500, 0]]))
        inimigos.add(Inimigo([2165,306], "sprites/Atirador/AtiradorDeitadoEsquerda.gif", 2, 1.5, 1.5, 60, [[0,1000,2,500, 0], [0,1000,1,500, 0]]))
        inimigos.add(Inimigo([2124,462], "sprites/Torres/TorreCimaEsquerda.gif", 5, 1.3, 1.3, 90, [[4,700,3,500, 0]]))
        # The stage boss is returned so the caller can track its defeat.
        boss1 = Inimigo([2300,306], "sprites/Bosses/boss3.gif", 35, 1.5, 3.5, 90, [[0, 1000, 3, 500, -15], [5, 1000, 3, 500, 0]])
        inimigos.add(boss1)
        return boss1
| [
"biasi131@gmail.com"
] | biasi131@gmail.com |
837945adfe5f58e0b2985accb3351473b1800bd9 | d1ff6b7feaf22eb7940281a17b823d6899bcfa8c | /python/problems/leetcode/1-two-sum.py | 37fd5066a714d2fe119697190f2cfbb831a6be0c | [] | no_license | sumitkrm/lang-1 | 42e839dae8caf78986a10bb3d635021d45f66593 | c08fdd1556b6dbbdda8ad6210aa0eaa97074ae3b | refs/heads/master | 2021-04-18T10:26:27.466682 | 2019-11-14T07:56:45 | 2019-11-14T07:56:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,418 | py | """
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
# function to check for the given sum in the array
def returnIndexPair(arr, sum):
    """Return 0-based indices of two entries of *arr* adding up to *sum*.

    Returns an empty list when no such pair exists. Uses a single pass
    with a value -> (1-based) position map.
    """
    print ("A =", arr, "n=", sum)
    seen = {}
    for idx, value in enumerate(arr, start=1):
        complement = sum - value
        if complement in seen:
            # Convert the stored 1-based positions back to 0-based indices.
            return [seen[complement] - 1, idx - 1]
        seen[value] = idx
    return []
# Quick manual checks for returnIndexPair (results printed, not asserted).
A = [3, 3]
n = 6
print(returnIndexPair(A, n))
A = [3, 2, 4]
n = 6
print(returnIndexPair(A, n))
A = [1, 4, 45, 6, 10, -8]
n = 16
print(returnIndexPair(A, n))
A = [1, 2, 4]
n = 6
print(returnIndexPair(A, n))
# No pair sums to 0 here, so this prints an empty list.
A = [-3, 4, 3, 90]
n = 0
print(returnIndexPair(A, n))
def twoSum(nums, target):
    """Return indices [i, j] (i < j) with nums[i] + nums[j] == target.

    Returns [] when no pair exists. Single pass: the complement lookup
    table only ever contains *earlier* elements, so duplicates such as
    nums=[3, 3], target=6 are handled correctly (the previous version
    pre-built the full table and then rejected the pair because both
    occurrences mapped to the same stored index).
    """
    seen = {}
    for j, value in enumerate(nums):
        complement = target - value
        if complement in seen:
            return [seen[complement], j]
        seen[value] = j
    return []
# arr = [2, 7, 11, 15]
# target = 26
# print (twoSum(arr, target))
# arr = [3, 2, 4]
# target = 6
# print (twoSum(arr, target))
# # This testcase is not passing!!!
# arr = [3, 3]
# target = 6
# print (twoSum(arr, target))
| [
"noreply@github.com"
] | sumitkrm.noreply@github.com |
c6bccc7b878d8ad17ad5f51b5c1bd8f96e13bd0f | 1bfe5887d6158b7bc4c57b2b136b9f8b666c7bbd | /UHCF/run.py | f4e16833f92cba7a1a9a89d4ff32448098524c85 | [] | no_license | wenzhiquan/lab | 879c6e835b99f224b2bff0ea0847de81dc488dde | 37cd41244e281b84663d752b72f3ca3b13f3c37f | refs/heads/master | 2021-01-17T08:53:18.717864 | 2016-05-17T08:23:22 | 2016-05-17T08:23:22 | 22,674,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,665 | py | #!/env/python
# -*- encoding: utf-8 -*-
"""
@version: 0.1
@author: wenzhiquan
@contact: wenzhiquanr@163.com
@site: http://github.wenzhiquan.com
@software: PyCharm
@file: run.py
@time: 16/1/5 21:43
@description: null
"""
from datetime import datetime
from tools.sortByTime import sortByTime
from tools.combineById import combineById
from tools.divideTrainAndTest import divideTrainAndTest
from tools.timeInterval import timeInterval
from CFU.CFU import CFU
from CFU.promoteCFU import PromoteCFU
from core.UHCF import UHCF
from common.evaluation import Evaluation
from common.recommendation import generaRecommendList
from common.combineCFUAndTHCCF import combine
from common.movieAttr import MovieAttr
from config import config
# Pipeline driver (Python 2): each stage runs only when its flag is set in
# config. Stage order matters — later stages consume files written earlier.
if __name__ == '__main__':
    startTime = datetime.now()
    print 'program start......'
    print 'start time :'
    print startTime
    # movieAttr = MovieAttr()
    # movieAttr.commonLabel()
    # timeInterval()
    # Split the raw ratings into train/test sets.
    if config.needDivideTrainAndTest is True:
        divideTrainAndTest()
    # Pre-processing: sort by timestamp, then group records per user id.
    if config.needPreSettle is True:
        sortByTime()
        combineById()
    # User-history CF: build preferences, user similarity, recommendations.
    if config.needUHCF is True:
        uhcf = UHCF()
        uhcf.generaUserPrefer()
        uhcf.simCalculate()
        generaRecommendList(config.userSimMatrix)
    # Item-based CF variant (the plain CFU path is kept commented out).
    if config.needCFU is True:
        # cfu = CFU()
        # cfu.matrix()
        # generaRecommendList()
        cfu = PromoteCFU()
        cfu.iuMatrix()
        # cfu.matrix()
        # generaRecommendList(config.promoteCFUUserSimMatrix)
    # Combine the CFU and UHCF similarity matrices, then recommend.
    if config.needCombine is True:
        combine()
        generaRecommendList(config.combineSimMatrix)
    # Evaluation: recall/precision/F-value (and MAE, currently disabled),
    # appended as one CSV row to result/evaluationResult.csv.
    if config.needEvaluate is True:
        evaluate = Evaluation()
        rap = evaluate.recall_and_precision()
        print "recall: %5.5f%%" % rap[0]
        print "precision: %5.5f%%" % rap[1]
        fvalue = evaluate.fvalue(rap)
        print "F value: %5.5f%%" % fvalue
        mae = 0  # evaluate.MAE()
        print "MAE: %5.5f" % mae
        # diversity = evaluate.diversity()
        # print "diversity: %5.5f%%" % diversity
        outfile = r'result/evaluationResult.csv'
        # NOTE(review): consider `with open(...)` so the handle is closed on error.
        out = open(outfile, 'a')
        spliter = ','
        out.write(str(config.n) + spliter + str(config.listLength) +
                  spliter + str(config.G) + spliter + str(config.delta) +
                  spliter + str(rap[0])[:7] + '%' + spliter + str(rap[1])[:7] +
                  '%' + spliter + str(fvalue)[:7] + '%' + spliter + str(mae)[:7] + spliter + '\n')
        out.close()
    endTime = datetime.now()
    print 'program finished......'
    print 'finish time :'
    print endTime
    print 'total run time :'
    print endTime - startTime
| [
"wenzhiquanr@163.com"
] | wenzhiquanr@163.com |
6d941d57a643281b395f760262b236f1cde01bab | 72bf58e646d31d915403da0fd9fbf4ab54affdde | /amboro_store/settings.py | c84af598bc65f15d668c230a4f6bd223be3bb4c4 | [] | no_license | JohannQuispe/ecommerce | b3f1ccb102c31c5c6640352e735afe9e08859623 | 37aa66373118c1e5f4ed53b853ac9e680766523d | refs/heads/master | 2022-12-09T22:59:06.438155 | 2020-08-24T06:12:26 | 2020-08-24T06:12:26 | 286,192,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,041 | py | """
Django settings for amboro_store project.
Generated by 'django-admin startproject' using Django 2.2.14.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the secret key is hardcoded and committed; move it to an
# environment variable (python-decouple is already imported further down).
SECRET_KEY = 'ud$%8m&%*+v$uvkq)1mqn0z2!9h@w@dvrpb&ms$)kgp@7wtd6v'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1','.pythonanywhere.com']
# Application definition
# Project apps are listed first, followed by Django's contrib apps.
INSTALLED_APPS = [
    'users',
    'carts',
    'orders',
    'charges',
    'products',
    'categories',
    'billing_profiles',
    'shipping_addresses',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'amboro_store.urls'
# Custom user model lives in the `users` app.
AUTH_USER_MODEL ='users.User'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'amboro_store.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# NOTE(review): database credentials are hardcoded; prefer environment
# variables (e.g. via decouple's config()).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'amboro_store',
        'USER': 'postgres',
        'PASSWORD': 'passwd',
        'HOST': 'localhost',
        'PORT': '',
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# NOTE(review): `config` is imported but never used below — the credentials
# and API keys that follow should be read through it instead of hardcoded.
from decouple import config
# SECURITY WARNING: real email credentials committed to source control —
# rotate this password and load it from the environment.
EMAIL_HOST = 'smtp.googlemail.com'
EMAIL_PORT = '587'
EMAIL_HOST_USER = 'jquis812@gmail.com'
EMAIL_HOST_PASSWORD = 'jb7803105JB'
EMAIL_USE_TLS = True
# Stripe test-mode keys; the private key should never live in settings.py.
STRIPE_PUBLIC_KEY = 'pk_test_51HImxHGU4eiL1A64tu8vUcXntO6aRiU836NBsShkl1hSWKnKcMYg8YqulUBSasf4rRrlTUdPP71k0obm1YxMEyrv00YYw1xrGE'
STRIPE_PRIVATE_KEY = 'sk_test_51HImxHGU4eiL1A640dpmLiI6zvqdULk7vyilaFghUBichHxttlj2wnfx3Lb7BJLsy3cCooPK1k0rAmqqyxFC4hfs00ue1UYSC6'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
"jbquispe@hotmail.com"
] | jbquispe@hotmail.com |
e017adc706cc398730b245c4fc39a45eb54053f2 | 5776a924dbb3d305177b8c269cc86af2736c4b6a | /trending/trending.py | 95ddc2bc97c3fa013556a6fb6c10bd88bbc4d1ed | [
"Apache-2.0"
] | permissive | sipolac/trending | 8d248a98a1198472a42b25aff6f699b2ebe985a0 | 8155bcea5758a1b1cd7aa0c585658e754fe4c034 | refs/heads/master | 2020-06-29T01:15:33.554139 | 2019-08-25T21:51:51 | 2019-08-25T21:51:51 | 200,395,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,670 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Author: Chris Sipola
Created: 2019-08-02
Functions for quantifying "trending"-ness.
Some notes on "notation":
> `a` is used to represent a series/list/array of interest, in the style of
numpy. However, within the project, `a` treated as a list since I didn't
want a numpy dependency. In practice there should be no issue using a numpy
array or a pandas series.
> `growth` (or `g` if shortened) represents proportional growth. E.g., from
2 to 3, growth is 3 / 2 = 1.5.
> `rate` (or `r` if shortened) represents rate in the sense of a geometric
series: http://mathworld.wolfram.com/GeometricSeries.html
> `n` means the number of observations. Adjustments may need to be made based
on how `n` is used. For example, the typical index for the sum of a finite
geometric series goes from 0 to n, meaning there are actually n + 1
observations. So 1 must be subtracted before using this formula.
"""
from functools import reduce
from operator import mul
def _compute_growth(a):
    """Computes proportional growth between consecutive values in input list.

    >>> _compute_growth([1, 2, 3, 4, 5])
    [2.0, 1.5, 1.3333333333333333, 1.25]
    """
    return [curr / prev for prev, curr in zip(a, a[1:])]


def _geom_mean(growth_list, weights):
    """Computes weighted geometric mean of *growth_list*."""
    weighted = [g**w for g, w in zip(growth_list, weights)]
    return reduce(mul, weighted)**(1 / sum(weights))


def _decaying_weights(n, r):
    """Computes n weights that decay geometrically at rate r (newest last)."""
    return [r**(n - i - 1) for i in range(n)]


def recent_growth(a, r):
    """Computes geometric mean of growth rates, with more weight on recent obs.

    Args:
        a: List of floats for which to compute recent growth (len >= 2)
        r: Float for decay rate in [0, 1]. At the extremes, 1 gives equal
            weight to each observation, and (because 0**0 == 1 in Python)
            0 gives all the weight to the most recent observation

    Returns:
        Float for weighted geometric mean of growth rates

    Raises:
        ValueError: If `a` has fewer than 2 values or `r` is outside [0, 1].
            (Previously a bare Exception was raised; ValueError is the
            idiomatic type and remains catchable by existing handlers.)

    >>> recent_growth([5, 5, 5], r=0.8)  # no trend
    1.0
    >>> recent_growth([4, 5, 6], r=0.8)  # upward trend
    1.2219704337257924
    """
    if len(a) < 2:
        raise ValueError('input list `a` must have more than 1 value')
    if r < 0 or r > 1:
        raise ValueError('`r` must be between 0 and 1 (inclusive)')
    growth_list = _compute_growth(a)
    weights = _decaying_weights(len(growth_list), r)
    return _geom_mean(growth_list, weights)
def _geom_sum(r, n):
    """Sum of the geometric series 1 + r + ... + r**n.

    Pass n=float('inf') for the infinite series (valid when 0 <= r < 1).
    """
    numerator = 1 - r**(n + 1)
    return numerator / (1 - r)


def compute_weight_frac(r, last_n, total_n=None):
    """Computes fraction of total weight represented by the last n obs.

    That is: [sum of weights of the most recent `last_n` observations]
    divided by [sum of weights of *all* observations], where *all* is
    either `total_n` actual observations or a theoretically infinite
    number of observations.

    Args:
        r: Float for decay rate
        last_n: Int for number of most recent observations
        total_n: Int for total number of observations. If None, the
            infinite geometric sum is used instead

    Returns:
        Float for fraction
    """
    if total_n is None:
        total_n = float('inf')
    # The finite-sum helper indexes 0..n inclusive, so subtract 1 to
    # count exactly that many observations.
    recent = _geom_sum(r, last_n - 1)
    total = _geom_sum(r, total_n - 1)
    return recent / total


def find_r(frac, last_n, total_n=None, error_bound=1e-6):
    """Finds r s.t. the last n obs make up the given fraction of total weight.

    Uses bisection on [0, 1]: the weight fraction is monotone in r, so we
    halve the interval until it is narrower than `error_bound`.

    Args:
        frac: Float for proportion of total weight represented by the
            last n observations
        last_n: Int for number of most recent observations
        total_n: Float for total number of observations. If None, the
            infinite geometric sum is used instead
        error_bound: Error bound of r

    Returns:
        Float for decay rate

    >>> find_r(0.5, 10)  # r such that last 10 obs make up 50% of total weight
    0.9330339431762695
    """
    # With finitely many observations, even r == 1 (equal weights) may
    # already satisfy the requested fraction.
    if total_n is not None and last_n / total_n >= frac:
        return 1
    lo, hi = 0, 1
    rate = (lo + hi) / 2
    while hi - lo > error_bound:
        current = compute_weight_frac(rate, last_n, total_n)
        if current > frac:
            lo = rate
        elif current < frac:
            hi = rate
        else:
            break
        rate = (lo + hi) / 2
    return rate
| [
"sipolac@gmail.com"
] | sipolac@gmail.com |
2f9963b5e8c4babf74fc6d9a8e0e0e7a894047c5 | 9f4d5b17ba701e6e9f9ade4441b7aae106c3fd84 | /mordred/Weight.py | 7ac3c7f37def4c167eefb82f583dee7c083f2f5e | [
"BSD-3-Clause"
] | permissive | simonbray/mordred | 55385e37b3f622513e75f00fe21fb7e6d1edf02d | bfb3b0a50fb7f42cd996e091d67c3a3dcc815134 | refs/heads/master | 2020-05-26T04:23:50.856152 | 2018-05-31T07:21:43 | 2018-05-31T07:21:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | from rdkit.Chem.Descriptors import MolWt, ExactMolWt
from ._base import Descriptor
__all__ = (
"Weight",
)
class Weight(Descriptor):
    r"""molecular weight descriptor.

    :type exact: bool
    :param exact: use the exact (monoisotopic) molecular weight instead of
        the average atomic weight

    :type averaged: bool
    :param averaged: averaged by number of atom
    """

    def description(self):
        """Return a human-readable description of this descriptor variant."""
        return "{}{}molecular weight".format(
            "averaged " if self._averaged else "",
            "exact " if self._exact else "",
        )
    since = "1.0.0"
    __slots__ = ("_averaged", "_exact")
    # Hydrogens must be explicit so they contribute to the atom count
    # used when averaging.
    explicit_hydrogens = True
    @classmethod
    def preset(cls, version):
        """Yield the default descriptor instances (exact MW and averaged exact MW)."""
        yield cls(True, False)
        yield cls(True, True)
    def __str__(self):
        # Naming scheme: "A" prefix = averaged per atom; "a" prefix = the
        # non-exact (average atomic weight) variant. E.g. MW, AMW, aMW.
        return "{}{}MW".format("A" if self._averaged else "", "" if self._exact else "a")
    def parameters(self):
        # Order must match the __init__ signature (exact, averaged).
        return self._exact, self._averaged
    def __init__(self, exact=True, averaged=False):
        self._averaged = averaged
        self._exact = exact
    def calculate(self):
        """Compute the (possibly averaged) molecular weight of self.mol."""
        w = ExactMolWt(self.mol) if self._exact else MolWt(self.mol)
        if self._averaged:
            w /= self.mol.GetNumAtoms()
        return w
    rtype = float
| [
"philopon.dependence@gmail.com"
] | philopon.dependence@gmail.com |
bdea9c72dd10be9794375d3cc3d1d61fea04371d | 490fec2286bbd3241f7b4e7e2520496087ef2271 | /manage.py | 54c58d756cb665d08282278ea8a0579fb24e3844 | [] | no_license | lupeixin/drf_03 | 689018fa14fd75f7be09f64390930b8459c83567 | 6dbb3b0e6883d8450d7d9d856511ece02217f1ee | refs/heads/master | 2022-12-11T21:58:35.381615 | 2020-09-15T00:52:52 | 2020-09-15T00:52:52 | 295,574,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 626 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks for the drf_03 project."""
    # Point Django at this project's settings before importing anything
    # that reads them.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'drf_03.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"813094269@qq.com"
] | 813094269@qq.com |
bfc337766c024645d34b4d882f875b699dde5fb8 | 7acb4e7602b706f26cbf0a5d5a69a08739e23b42 | /geektime/mathbase/chapter6/lesson6_1.py | 68c0c37772069ed48d427c9c2d3db38e070e0dbc | [] | no_license | JasenChu/debugtalk | 2577c8194f8ff10fd90cfc4998e9c27c9d5a9ea7 | 1b6ac9ebaeef88843e5fb732db4ee41872569ade | refs/heads/master | 2020-04-12T09:19:01.810342 | 2018-12-26T04:40:43 | 2018-12-26T04:40:43 | 162,398,792 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | # 实现2路归并排序算法
def merge(leftSource = [], rightSource = []):
    """Merge two already-sorted lists into one sorted list.

    The merge is stable: on ties the element from the left list comes
    first. The mutable default arguments are never mutated, so they are
    harmless here.
    """
    merged = []
    li = 0  # read position in the left list
    ri = 0  # read position in the right list
    while li < len(leftSource) and ri < len(rightSource):
        if leftSource[li] <= rightSource[ri]:
            merged.append(leftSource[li])
            li = li + 1
        else:
            merged.append(rightSource[ri])
            ri = ri + 1
    # At most one of these still has elements left; extend with both tails.
    merged.extend(leftSource[li:])
    merged.extend(rightSource[ri:])
    return merged


def merge_sort(to_sort = []):
    """Sort a list with recursive 2-way merge sort and return the result."""
    # Base case must be `<= 1` (not `== 1`): an empty list previously fell
    # through, split into two empty halves and recursed forever.
    if len(to_sort) <= 1:
        return to_sort
    mid = len(to_sort) // 2
    left = merge_sort(to_sort[:mid])
    right = merge_sort(to_sort[mid:])
    return merge(left, right)


if __name__ == '__main__':
    print(merge_sort([10,20,9,50,30,99,3]))
"xiaochu698@126.com"
] | xiaochu698@126.com |
9b80f24b60cf7a97705d6d7face0f6a14fab0453 | 5b82fa5f8d98c8fe6fbccae7566e7d9eaa2e7428 | /tests/arbitrage_test.py | 195cb57d48c295f8ee26d019b9b775eee39934ed | [
"MIT"
] | permissive | f0ster/bitcoin-arbitrage | a84325b78920b2850eed7673112786102afa3bb5 | 2c389fca988e6d24f3394adbc67d4a01259aa345 | refs/heads/master | 2020-04-15T03:15:13.794667 | 2013-04-18T01:39:47 | 2013-04-18T01:39:47 | 9,504,532 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,567 | py | import sys
sys.path.append('src/')
sys.path.append('../src/')
import unittest
import arbitrage
# Order-book fixtures: per exchange, asks (sell offers) and bids (buy offers)
# sorted best-first. depths1 has both sides for both exchanges.
depths1 = {
    'BitcoinCentralEUR':
    {'asks': [{'amount': 4, 'price': 32.8},
              {'amount': 8, 'price': 32.9},
              {'amount': 2, 'price': 33.0},
              {'amount': 3, 'price': 33.6}],
     'bids': [{'amount': 2, 'price': 31.8},
              {'amount': 4, 'price': 31.6},
              {'amount': 6, 'price': 31.4},
              {'amount': 2, 'price': 30}]},
    'MtGoxEUR':
    {'asks': [{'amount': 1, 'price': 34.2},
              {'amount': 2, 'price': 34.3},
              {'amount': 3, 'price': 34.5},
              {'amount': 3, 'price': 35.0}],
     'bids': [{'amount': 2, 'price': 33.2},
              {'amount': 3, 'price': 33.1},
              {'amount': 5, 'price': 32.6},
              {'amount': 10, 'price': 32.3}]}}
# Profitable scenario: best ask (32.8) is below best bid (33.2).
depths2 = {
    'BitcoinCentralEUR':
    {'asks': [{'amount': 4, 'price': 32.8},
              {'amount': 8, 'price': 32.9},
              {'amount': 2, 'price': 33.0},
              {'amount': 3, 'price': 33.6}]},
    'MtGoxEUR':
    {'bids': [{'amount': 2, 'price': 33.2},
              {'amount': 3, 'price': 33.1},
              {'amount': 5, 'price': 32.6},
              {'amount': 10, 'price': 32.3}]}}
# Unprofitable scenario: every ask is above every bid.
depths3 = {
    'BitcoinCentralEUR':
    {'asks': [{'amount': 1, 'price': 34.2},
              {'amount': 2, 'price': 34.3},
              {'amount': 3, 'price': 34.5},
              {'amount': 3, 'price': 35.0}]},
    'MtGoxEUR':
    {'bids': [{'amount': 2, 'price': 33.2},
              {'amount': 3, 'price': 33.1},
              {'amount': 5, 'price': 32.6},
              {'amount': 10, 'price': 32.3}]}}
class TestArbitrage(unittest.TestCase):
    """Exercises Arbitrer.get_profit_for against the fixed depth fixtures.

    unittest assertion methods are used instead of bare `assert` so
    failures report the actual vs expected values, and so the checks
    survive running under `python -O` (which strips assert statements).
    """

    def setUp(self):
        self.arbitrer = arbitrage.Arbitrer()

    def test_getprofit1(self):
        # Single-level trade: buy 2 @ 32.8, sell 2 @ 33.2 -> 0.8 profit.
        self.arbitrer.depths = depths2
        profit, vol, wb, ws = self.arbitrer.get_profit_for(
            0, 0, 'BitcoinCentralEUR', 'MtGoxEUR')
        self.assertEqual(int(profit * 100), 80)
        self.assertEqual(vol, 2)

    def test_getprofit2(self):
        # Multi-level trade across 3 ask levels and 2 bid levels.
        self.arbitrer.depths = depths2
        profit, vol, wb, ws = self.arbitrer.get_profit_for(
            2, 1, 'BitcoinCentralEUR', 'MtGoxEUR')
        self.assertEqual(int(profit * 100), 159)
        self.assertEqual(vol, 5)

    def test_getprofit3(self):
        # No opportunity: asks are above bids, so no profit and no volume.
        self.arbitrer.depths = depths3
        profit, vol, wb, ws = self.arbitrer.get_profit_for(
            2, 1, 'BitcoinCentralEUR', 'MtGoxEUR')
        self.assertEqual(profit, 0)
        self.assertEqual(vol, 0)


if __name__ == '__main__':
    unittest.main()
| [
"maxime.biais@gmail.com"
] | maxime.biais@gmail.com |
88aaaf265c27f0e7826a4b1bda5b42dff316c456 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/3/hc1.py | 168bddc7a840e82f5abb3977a411aeb871b621cb | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Python 2 code. `lineRemaining` is a list of whitespace-split tokens;
    # a valid argument is wrapped in stand-alone double-quote tokens.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Drop the quote tokens and print the remaining data joined
            # by single spaces.
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # Empty quoted string: print a blank line.
            print
def main(fileName):
    # Interpret each line of the file: lines whose first token is the
    # opcode 'hC1' are print instructions; anything else prints ERROR
    # and stops. (Python 2 syntax.)
    # NOTE(review): a blank line would raise IndexError on data[0] — confirm
    # whether input files may contain empty lines.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'hC1':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return

if __name__ == '__main__':
    main(sys.argv[1])
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
0facbb2cb2b1430ef5345e6a5b4328243b4b7126 | 6f331acb287b91ab6e297da88dc42489b256249d | /clear_complete_flag.py | a794ad6462bea3072c3185165a1ac24442468c37 | [
"MIT"
] | permissive | One-sixth/getchu_character_picture_grabber | 0cb650c209622600bd192614e83839eb3138e100 | 2a8d44ecf987ca51784449f9803c5e53d54a3e4e | refs/heads/master | 2020-06-19T15:04:41.074789 | 2020-01-14T03:21:38 | 2020-01-14T03:21:38 | 196,755,381 | 18 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,166 | py | '''
Often the "complete" flags written by the crawler are unreliable, so this
script clears them selectively based on the switches below.
'''
import os
# Marker file names for each crawl stage.
stage1_complete_name = '.complete'
stage2_complete_name = '.complete'
stage3_complete_name = '.complete'
stage3_complete_pic_name = '.complete_pic'
# -----------------------------------------------------------
# Parameter area
# Switches: which completion flags to clear
clear_stage1_complete = False
clear_stage2_complete = False
clear_stage3_complete = False
clear_stage3_complete_pic = False
# -----------------------------------------------------------
dataset_root = 'dataset'
wait_to_delete_mark = []
# One big walk over dataset/<company>/<product>/<file> collecting the
# marker files selected above.
for company_id in os.listdir(dataset_root):
    company_path = os.path.join(dataset_root, company_id)
    if clear_stage1_complete and company_id == stage1_complete_name:
        print('Wait to delete', company_path)
        wait_to_delete_mark.append(company_path)
    elif os.path.isdir(company_path):
        # Company directory: descend into its products.
        for product_id in os.listdir(company_path):
            product_path = os.path.join(company_path, product_id)
            if clear_stage2_complete and product_id == stage2_complete_name:
                print('Wait to delete', product_path)
                wait_to_delete_mark.append(product_path)
            elif os.path.isdir(product_path):
                for file_name in os.listdir(product_path):
                    file_path = os.path.join(product_path, file_name)
                    if clear_stage3_complete and file_name == stage3_complete_name:
                        print('Wait to delete', file_path)
                        wait_to_delete_mark.append(file_path)
                    elif clear_stage3_complete_pic and file_name == stage3_complete_pic_name:
                        print('Wait to delete', file_path)
                        wait_to_delete_mark.append(file_path)
# Interactive confirmation before anything is actually removed.
while True:
    r = input('If you want to delete them.\nPlease input y to continue or n to cancel.\n')
    if r == 'n':
        print('Cancel')
        exit(0)
    elif r == 'y':
        break
    else:
        print('Please input y or n.')
for f in wait_to_delete_mark:
    # print(f)
    os.remove(f)
print('Success')
| [
"One-sixth@users.noreply.github.com"
] | One-sixth@users.noreply.github.com |
11faeae5f596c6f2c9fb11540a28c64304960907 | 542ef68721859a6b60558b074f2b3a9b6a9535a6 | /NexusStoreBackend/userProfile/migrations/0011_alter_address_user.py | 1d755ab38f3d2c89b3cd8b2bbc748e6991d4df92 | [] | no_license | Raman9937/NexusStore | e0ae96cf0d9e1130eaa3c560808e467ba28dbb01 | ceed4324995dde7623774548e6fad968f93a2ff5 | refs/heads/main | 2023-06-01T04:49:00.832649 | 2021-06-19T04:14:50 | 2021-06-19T04:14:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 585 | py | # Generated by Django 3.2.4 on 2021-06-12 20:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('userProfile', '0010_auto_20210613_0125'),
]
operations = [
migrations.AlterField(
model_name='address',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
),
]
| [
"79391583+RitikVashisth007@users.noreply.github.com"
] | 79391583+RitikVashisth007@users.noreply.github.com |
f0bf9f11e977825e819d9ebc3d85c48b7b4dc035 | 1769a59bf55b04d358ba10759c596df4139dcd8b | /codeforces/594_div2/integerpoints.py | 849476900e25cf0b9e9f92316f062262022798d8 | [] | no_license | preetmishra/competitive-programming | 39c3b7b21d2b71589bd5e58989ce2f50ce400ecd | 7d014112a2e3f1bb5508c1e03378e13d94292f6c | refs/heads/master | 2021-07-14T15:24:22.417735 | 2020-09-29T15:58:57 | 2020-09-29T16:13:31 | 212,810,607 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | def count_even_odd(ls):
even, odd = 0, 0
for i in ls:
if i % 2 == 0:
even += 1
else:
odd += 1
return (even, odd)
for _ in range(int(input())):
p = int(input())
ps = list(map(int, input().split()))
q = int(input())
qs = list(map(int, input().split()))
even_p, odd_p = count_even_odd(ps)
even_q, odd_q = count_even_odd(qs)
print((even_p * even_q) + (odd_p * odd_q))
| [
"ipreetmishra@gmail.com"
] | ipreetmishra@gmail.com |
bd6759f3f1eccc5507bd0d1fdb5dd71df14f6cc9 | e1df12efffc353132b9996899f92d2e9186ded51 | /src/util.py | 12c542992c489e1bf186c4d00d225dd0b90cafeb | [
"MIT"
] | permissive | fracogno/UNet-ResWDCGAN | f67583a43a6b9e1c39ecf73b94360db9a54c1f4b | 000a6e82111ccffe6189ae270c2465b168918d9a | refs/heads/master | 2020-07-16T09:03:52.128729 | 2020-02-03T20:39:50 | 2020-02-03T20:39:50 | 205,760,133 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 721 | py | import numpy as np
import matplotlib.image as mpimg
import cv2
import pandas as pd
def sample_noise(size, mu=0., sigma=1.):
return np.random.normal(mu, sigma, size=size)
def getData(path, size, value="mel"):
DF = pd.read_pickle(path)
assert(len(DF["image"]) == len(DF["id"]))
X = []
for i in range(len(DF["image"])):
if DF["id"][i] == value:
tmp = cv2.resize(DF["image"][i], (int(size), int(size)), interpolation=cv2.INTER_CUBIC)
result = (tmp - 127.5) / 127.5
X.append(result)
return np.array(X, dtype=np.float32)
def saveImages(filename, images):
for i in range(len(images)):
mpimg.imsave(filename + "-" + str(i) + ".png", ( (images[i] * 127.5) + 127.5 ).astype(np.uint8) ) | [
"francesco.cognolato@hotmail.com"
] | francesco.cognolato@hotmail.com |
de5121c04015867415044179babf4281786f2d99 | 0c81c371ce9023f3915f2b05c1c4ca7b9f435cf4 | /tools/migrate/rake.py | 577567fe8e2bbd01bc7f896d19a4c5e1de1480ff | [] | no_license | linbirg/om | 831fb7458d1c0176fd76ba4b6f739e95f9593788 | fd1328ebe3fb38893bf88519ca10768507519494 | refs/heads/master | 2021-06-30T16:22:50.327937 | 2020-09-14T07:36:36 | 2020-09-14T07:36:36 | 159,285,543 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,080 | py | #!/usr/bin/python3
# -*- coding:utf-8 -*-
# Author: yizr
import os
import sys
__abs_file__ = os.path.abspath(__file__)
migrate_dir = os.path.dirname(__abs_file__)
tool_dir = os.path.dirname(migrate_dir)
code_dir = os.path.dirname(tool_dir)
sys.path.append(code_dir)
import re
import importlib
import importlib.util
# from lib.yom import DDL, Model
# from lib import dbutil as db
from tools.migrate.rake_migrate import RakeMigrate as Migrate
# class Rake(object):
# def __init__(self):
# self.__regx__ = r'(.*)(_)([0-9]+)(\.py)$'
# self.__p__ = re.compile(self.__regx__)
# return super().__init__()
# def rake(self):
# for task in self.tasks:
# task.rake()
def get_current_path():
# __abs_file__ = os.path.abspath(__file__)
# __cur_path__ = os.path.dirname(__abs_file__)
__cur_path__ = os.getcwd()
return __cur_path__
def dir_file(path):
pathDir = os.listdir(path)
return pathDir
# 下划线[num]_.py结尾
def is_name_numberd(name):
regx = r'^([0-9]+)(_)(.*)(\.py)$'
ma = re.match(regx, name)
return True if ma else False
def parse_number(name):
regx = r'^([0-9]+)(_)(.*)(\.py)$'
ma = re.match(regx, name)
if ma:
return int(ma.group(1))
def parse_module_name(file_name):
regx = r'^(.*)(\.py)$'
ma = re.match(regx, file_name)
if not ma:
return None
return ma.group(1)
def _sort_(list_names):
return sorted(list_names, key=lambda n: parse_number(n))
def list_all_migration_files(path):
files = dir_file(path)
migs = list(filter(lambda f: is_name_numberd(f), files))
return migs
def check_module(module_name):
"""
Checks if module can be imported without actually
importing it
"""
module_spec = importlib.util.find_spec(module_name)
if module_spec is None:
print("Module: {} not found".format(module_name))
return None
else:
print("Module: {} can be imported".format(module_name))
return module_spec
def import_module_from_spec(module_spec):
"""
Import the module via the passed in module specification
Returns the newly imported module
"""
module = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(module)
return module
def loader(mdl_name):
module_spec = check_module(mdl_name)
if module_spec:
module = import_module_from_spec(module_spec)
return module
return None
# return importlib.reload(mdl_name)
def list_all_klass(module):
if type(module) == str:
module = loader(module)
klass = []
for name in dir(module):
var = getattr(module, name)
if isinstance(var, type):
klass.append(name)
return klass
def is_child_of(obj, cls):
try:
for i in obj.__bases__:
if i is cls or isinstance(i, cls):
return True
for i in obj.__bases__:
if is_child_of(i, cls):
return True
except AttributeError:
return is_child_of(obj.__class__, cls)
return False
def get_all_klass_type_in(module, klass=None):
if type(module) == str:
module = loader(module)
if not klass:
klass = type
klasses = []
for name in dir(module):
var = getattr(module, name)
if is_child_of(var, klass):
klasses.append(var)
return klasses
def max_number(path=None):
if path is None:
path = get_current_path()
childs = list_all_migration_files(path)
if len(childs) > 0:
sorted_childs = _sort_(childs)
num = parse_number(sorted_childs[-1])
return num
return 0
# def main(path=None):
# if path is None:
# path = get_cuurent_path()
# # print(path)
# childs = list_all_migration_files(path)
# sorted_childs = _sort_(childs)
# for f in sorted_childs:
# mdl = loader(parse_module_name(f))
# klasss = get_all_klass_type_in(mdl, Migrate)
# for k in klasss:
# # print(k)
# k().down()
def change_to_camel(name, sep='_'):
string_list = str(name).split(sep) # 将字符串转化为list
first = string_list[0].lower()
others = string_list[1:]
# str.capitalize():将字符串的首字母转化为大写
others_capital = [word.capitalize() for word in others]
others_capital[0:0] = [first]
# 将list组合成为字符串,中间无连接符。
hump_string = ''.join(others_capital)
return hump_string
# 包含下划线则认为是
def is_slash_name(name):
return '_' in name
def change_to_slash_name(name):
if is_slash_name(name):
return name.lower()
listx = name[0:len(name)]
listy = listx[0]
for i in range(1, len(listx) - 1):
# listx[i] 直接copy 或 先加'_'再copy
if listx[i].isupper(
) and not listx[i - 1].isupper(): # 加'_',当前为大写,前一个字母为小写
listy += '_'
listy += listx[i]
elif listx[i].isupper() and listx[i -
1].isupper() and listx[i +
1].islower():
# 加'_',当前为大写,前一个字母为小写
listy += '_'
listy += listx[i]
else:
listy += listx[i]
return listy.lower()
def generate_file(name='migrate_task', path=None):
if not path:
path = get_current_path()
slash_name = change_to_slash_name(name)
numbered_name = '%d_%s.py' % (max_number(path) + 1, slash_name)
full_path = os.path.sep.join([path, numbered_name])
with open(full_path, 'w') as f:
tmps = """#!/usr/bin/python3
# -*- coding:utf-8 -*-
# Author: yizr
import os
import sys
__abs_file__ = os.path.abspath(__file__)
tool_dir = os.path.dirname(os.path.dirname(__abs_file__))
code_dir = os.path.dirname(tool_dir)
sys.path.append(code_dir)
from tools.migrate.rake_migrate import RakeMigrate
from lib import dbutil
import module.dao.base.pg_field_desc as fd
class %s(RakeMigrate):
def __init__(self):
super().__init__()
self.db_conn = dbutil.get_connection('risk_db')
def up(self):
self.create_table('%s',*columns, fd.UpdateAtField(), fd.CreateAtField())
def down(self):
self.drop('%s')
"""
class_name = change_to_camel(slash_name)
tmps = tmps % (class_name, slash_name, slash_name)
f.write(tmps)
def run_migrate(path=None):
if path is None:
path = get_current_path()
childs = list_all_migration_files(path)
sorted_childs = _sort_(childs)
for f in sorted_childs:
mdl = loader(parse_module_name(f))
klasss = get_all_klass_type_in(mdl, Migrate)
for k in klasss:
obj = k()
obj.down()
obj.up()
def run_rollback(path=None):
if path is None:
path = get_current_path()
childs = list_all_migration_files(path)
sorted_childs = _sort_(childs)
for f in reversed(sorted_childs):
mdl = loader(parse_module_name(f))
klasss = get_all_klass_type_in(mdl, Migrate)
for k in klasss:
obj = k()
obj.down()
def print_usage():
print('usage python rake.py [cmd]')
print('[cmd]:')
print(' g: generate content eg: g create_table_risk_order')
print(' m: excute all migration by order.')
print(' r: rollback by desc order.')
def console(args):
# run_migrate(path=None)
if len(args) <= 1:
print_usage()
return
if args[1] == 'g':
path = None
if len(args) > 3:
path = args[3]
generate_file(name=args[2], path=path)
if args[1] == 'm':
path = None
if len(args) > 2:
path = args[2]
run_migrate(path=path)
if args[1] == 'r':
path = None
if len(args) > 2:
path = args[2]
run_rollback(path=path)
if __name__ == '__main__':
console(sys.argv)
| [
"linbirg@gmail.com"
] | linbirg@gmail.com |
2549239c2cb24167a54487c274b0d455622f7692 | 32ef8621468095bf9c6dd912767cb97e9863dc25 | /python/iterables-and-iterators.py | 31978c2aea6a3a158f486b5f938059dabb494a54 | [] | no_license | Seungju182/Hackerrank | 286f1666be5797c1d318788753245696ef52decf | 264533f97bcc8dc771e4e6cbae1937df8ce6bafa | refs/heads/master | 2023-08-17T22:49:58.710410 | 2021-10-25T09:40:46 | 2021-10-25T09:40:46 | 337,652,088 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | # Enter your code here. Read input from STDIN. Print output to STDOUT
from itertools import combinations
if __name__ == "__main__":
N = int(input())
letters = input().split()
K = int(input())
list_comb = list(combinations(letters, K))
print(len([c for c in list_comb if 'a' in c]) / len(list_comb))
| [
"tonysj@snu.ac.kr"
] | tonysj@snu.ac.kr |
fbf87fadcf29cafc4cea4e426184591e1c1e483d | 6d920708bcf248e51734bb87f4032dd070603500 | /cats_library/tests/__init__.py | fe243b030cfec515c65f2a517d0b9d6490f1cb14 | [] | no_license | zhenv5/cats | 63e059ccb1bd84fc5225fce62812cbbc3bc0a724 | ff15338b15b437d56667e45c1bb28bc99b04aed0 | refs/heads/master | 2020-12-02T19:33:23.439128 | 2017-04-18T12:32:05 | 2017-04-18T12:32:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | from __future__ import division, print_function, absolute_import
import numpy
__author__ = 'Alex Rogozhnikov'
def generate_dataset(n_samples=10000, n_features=3, n_categories=10):
data = numpy.random.randint(0, n_categories, size=[n_samples, n_features])
cat_predictions = numpy.random.normal(size=n_categories)
y = cat_predictions[data].sum(axis=1)
return data, y
| [
"axelr@man1-ipython01.cern.dev.yandex.net"
] | axelr@man1-ipython01.cern.dev.yandex.net |
c880e573fe1fd5efaf1aa84c17199fc993c042a4 | 75a26c994df254392c9dde503278f772116da078 | /main/setup.py | 4bf5a7291b5f2706a5f4ef6f56f2bf2497111a54 | [] | no_license | esfamely/es_face_server | 107f5905f0243bc21914d79f0f3fcfc164886f93 | 3c756d00c83cd0a8dd745fd32a074c9121977ab8 | refs/heads/master | 2020-12-11T04:42:39.995194 | 2020-01-17T07:02:58 | 2020-01-17T07:02:58 | 233,778,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,852 | py | class Setup:
"""
系统运行参数
"""
@staticmethod
def export(startswith="s"):
dict = {}
for item in Setup.__dict__.items():
if str(item[0]).startswith(startswith):
dict[item[0]] = item[1]
return dict
"""
系统运行平台
"""
# 数据库IP
s0_db_ip = "192.168.1.141"
# 数据库名
s0_db_name = "es_face"
# 数据库账号
s0_db_login = "es"
# 数据库密码
s0_db_pw = "123456"
"""
人脸采集器
"""
# 人脸检测haar cascade xml路径
s1_cascade_path = "../static/opencv/haarcascade_frontalface_default.xml"
# 缩放因子
s1_factor_mini = 0.25
# 人脸边界预留宽度
s1_border = 50
# 人脸图像统一尺寸
s1_size = 160
# 每隔几帧保存一次人脸图像
s1_frame_cc = 3
# 保存满几张就提交
s1_submit_cc = 3
# 图像相似度阀值
s1_distance_t = 63.0
"""
数据处理器
"""
# 人脸图像统一尺寸
s3_size = 160
# 人脸数据存放路径
s3_face_dir = "../web/static/dataset/face_" + str(s3_size)
# 缩放因子
s3_factor_mini = 0.5
# 最小检测尺寸
s3_minsize = 10
# 三个cnn的检测阀值
s3_threshold = [0.6, 0.7, 0.7]
# 尺度因子
s3_factor = 0.709
# 图像相似度阀值
s3_distance_t = 63.0
"""
识别模型
"""
# 是否使用特征提取法
s4_use_feature_extract = 1
# 特征文件存放路径
s4_feature_dir = "../dataset/face_{}_feature".format(s3_size)
# Facenet特征提取器已训练模型路径
s4_facenet_model_path = "../../facenet/models/20180402-114759/20180402-114759.pb"
# Facenet特征提取每批大小
s4_facenet_batch_size = 16
# 距离阀值
s4_distance_threshold = 1.05
| [
"292576631@qq.com"
] | 292576631@qq.com |
48b92115bf430ccfea52c8555cc132c5c87b9921 | 885a0893693db9298916b054cc74dd4aaa3a8e3c | /app.py | a418be0ec9a23201d1e49699fc6e8da53e06e24f | [
"Apache-2.0"
] | permissive | sshah98/NLP-Writing-Tool | 294fe79ea570d39ae4cb51e2267cc733f4ed0d9c | be85b7ad2fd0285d12c5a5348cc135819f7722fb | refs/heads/master | 2021-03-27T19:23:38.838339 | 2018-09-15T19:55:16 | 2018-09-15T19:55:16 | 111,587,608 | 0 | 0 | Apache-2.0 | 2018-09-25T21:45:24 | 2017-11-21T18:42:53 | Python | UTF-8 | Python | false | false | 1,831 | py | import os
from flask import Flask, url_for, render_template, request, redirect, session, Markup, flash
app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "nlp-analysis-key.json"
app.secret_key = 'random-key'
from google_npl import GoogleNLP
from email_stats import EmailStats
@app.route('/', methods=['POST', 'GET'])
def index():
if request.method == "POST":
if not request.form['writing']:
return render_template('index.html')
else:
try:
user_text = request.form['writing']
word_count = EmailStats(user_text).word_count()
get_text_easiness = EmailStats(user_text).get_text_easiness()
sentence_count = EmailStats(user_text).sentence_count()
subjectivity = EmailStats(user_text).subjectivity()
complex_words = EmailStats(user_text).complex_words()
sentiment = GoogleNLP(user_text).sentiment_text()
sentiment_score, sentiment_mag = sentiment.split(' ')
print(word_count, get_text_easiness, sentence_count, subjectivity, complex_words, sentiment_score, sentiment_mag)
return render_template('index.html', results=[('Word Count', word_count), ('Sentence Count', sentence_count), ('Readability', get_text_easiness), ('Subjectivity', subjectivity), ('Complex Words', complex_words), ('Sentiment Score', sentiment_score), ('Sentiment Strength', sentiment_mag)])
except Exception as e:
flash('Error', e)
return render_template('index.html')
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
| [
"shahsuraj261@gmail.com"
] | shahsuraj261@gmail.com |
86f2fa42be3a378d013ce34ab1665eeb061a4de5 | f07b7f546278c86ec91fe9fdacbe4acc266b5ef0 | /blog/blog/wvenv/Lib/site-packages/spyder/app/tour.py | 08ccd027dbdc5cb3695aea84507318de7ace5723 | [] | no_license | CankayaUniversity/ceng-407-408-2019-2020-Patent-Comparison-System | 0386a6d8651a9ce875a9cf56013c19d8242204c9 | d9c0f2d84d90932b962a0618b01652f3bd560f25 | refs/heads/master | 2020-08-18T09:55:23.676188 | 2020-06-27T21:19:20 | 2020-06-27T21:19:20 | 215,772,427 | 2 | 4 | null | 2020-06-27T21:26:31 | 2019-10-17T11:08:50 | null | UTF-8 | Python | false | false | 47,982 | py | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Spyder interactive tours"""
# pylint: disable=C0103
# pylint: disable=R0903
# pylint: disable=R0911
# pylint: disable=R0201
# Standard library imports
from __future__ import division
import sys
from math import ceil
# Third party imports
from qtpy.QtCore import (QEasingCurve, QPoint, QPropertyAnimation, QRectF, Qt,
Signal)
from qtpy.QtGui import (QBrush, QColor, QIcon, QPainter, QPainterPath, QPen,
QPixmap, QRegion)
from qtpy.QtWidgets import (QAction, QApplication, QComboBox, QDialog,
QGraphicsOpacityEffect, QHBoxLayout, QLabel,
QLayout, QMainWindow, QMenu, QPushButton,
QSpacerItem, QToolButton, QVBoxLayout, QWidget)
# Local imports
from spyder.config.base import _, get_image_path
from spyder.config.gui import is_dark_interface
from spyder.py3compat import to_binary_string
from spyder.utils.qthelpers import add_actions, create_action
from spyder.utils import icon_manager as ima
# Canvas colors used by the tour overlay, matched to the active UI theme.
# In the dark case both colors are deliberately the same single dark tone;
# the light case uses two distinct tones.
if is_dark_interface():
    MAIN_TOP_COLOR = MAIN_BG_COLOR = QColor.fromRgb(25, 35, 45)
else:
    MAIN_TOP_COLOR = QColor.fromRgb(230, 230, 230)
    MAIN_BG_COLOR = QColor.fromRgb(255, 255, 255)
# FIXME: Known issues
# How should we handle a specific dockwidget that does not exist or fails to
# load (e.g. IPython on Python 3.3)? Should that frame be removed, or should
# it display a warning?
class SpyderWidgets(object):
    """Names of the Spyder widgets that the tour can highlight or decorate.

    Each value is an expression, stored as a string, that the tour resolves
    against the Spyder main window to obtain the actual widget instance.
    Empty strings are placeholders for widgets that are not wired up yet.
    """
    # --- Panes ---
    ipython_console = 'ipyconsole'
    editor = 'editor'
    editor_line_number_area = 'editor.get_current_editor().linenumberarea'
    editor_scroll_flag_area = 'editor.get_current_editor().scrollflagarea'
    file_explorer = 'explorer'
    help_plugin = 'help'
    variable_explorer = 'variableexplorer'
    history_log = "historylog"

    # --- Toolbars (placeholders, not wired up yet) ---
    toolbars = ''
    toolbars_active = ''
    toolbar_file = ''
    toolbar_edit = ''
    toolbar_run = ''
    toolbar_debug = ''
    toolbar_main = ''

    # --- Other main window elements (placeholders, not wired up yet) ---
    status_bar = ''
    menu_bar = ''
    menu_file = ''
    menu_edit = ''
menu_edit = ''
def get_tours(index=None):
    """
    Return the full list of available tours (if index is None), or only
    the tour selected by *index*.
    """
    return get_tour(index)
def get_tour(index):
    """
    Generate and return the list of tour descriptions.

    The *index* argument is used to retrieve a particular tour. If None is
    passed, it will return the full list of tours. If instead -1 is given,
    this function will return a test tour.

    To add more tours, a new variable needs to be created to hold the list
    of frame dicts, and the ``tours`` variable at the bottom of this
    function needs to be updated accordingly.
    """
    sw = SpyderWidgets
    qtconsole_link = "https://qtconsole.readthedocs.io/en/stable/index.html"

    # This test tour serves as an example of the keys supported by each
    # tour frame dict ('title', 'content', 'image', 'widgets', 'decoration',
    # 'interact', 'run').
    test = [{'title': "Welcome to Spyder introduction tour",
             'content': "<b>Spyder</b> is an interactive development \
                         environment. This tip panel supports rich text. <br>\
                         <br> it also supports image insertion to the right so\
                         far",
             'image': 'tour-spyder-logo.png'},

            {'title': "Widget display",
             'content': ("This show how a widget is displayed. The tip panel "
                         "is adjusted based on the first widget in the list"),
             'widgets': ['button1'],
             'decoration': ['button2'],
             'interact': True},

            {'title': "Widget display",
             'content': ("This show how a widget is displayed. The tip panel "
                         "is adjusted based on the first widget in the list"),
             'widgets': ['button1'],
             'decoration': ['button1'],
             'interact': True},

            {'title': "Widget display",
             'content': ("This show how a widget is displayed. The tip panel "
                         "is adjusted based on the first widget in the list"),
             'widgets': ['button1'],
             'interact': True},

            {'title': "Widget display and highlight",
             'content': "This shows how a highlighted widget looks",
             'widgets': ['button'],
             'decoration': ['button'],
             'interact': False},
            ]

    # Frames of the main "Introduction tour" shown to new users.
    intro = [{'title': _("Welcome to the Introduction tour"),
              'content': _("<b>Spyder</b> is a powerful Interactive "
                           "Development Environment (or IDE) for the Python "
                           "programming language.<br><br>"
                           "Here we are going to guide you through its most "
                           "important features.<br><br>"
                           "Please use the arrow keys or click on the buttons "
                           "below to move along the tour."),
              'image': 'tour-spyder-logo.png'},

             {'title': _("The Editor"),
              'content': _("This is the pane where you write Python code before "
                           "evaluating it. You can get automatic suggestions "
                           "and completions while writing, by pressing the "
                           "<b>Tab</b> key next to a given text.<br><br>"
                           "The Editor comes "
                           "with a line number area (highlighted here in red), "
                           "where Spyder shows warnings and syntax errors. They "
                           "can help you to detect potential problems before "
                           "running the code.<br><br>"
                           "You can also set debug breakpoints in the line "
                           "number area, by doing a double click next to "
                           "a non-empty line."),
              'widgets': [sw.editor],
              'decoration': [sw.editor_line_number_area]},

             {'title': _("The IPython console"),
              'content': _("This is one of panes where you can run or "
                           "execute the code you wrote on the Editor. To do it "
                           "you need to press the <b>F5</b> key.<br><br>"
                           "This console comes with several "
                           "useful features that greatly improve your "
                           "programming workflow (like syntax highlighting and "
                           "inline plots). If you want to know more about them, "
                           "please follow this <a href=\"{0}\">link</a>.<br><br>"
                           "Please click on the button below to run some simple "
                           "code in this console. This will be useful to show "
                           "you other important features.").format(
                               qtconsole_link),
              'widgets': [sw.ipython_console],
              'run': ["li = list(range(100))", "d = {'a': 1, 'b': 2}"]
              },

             {'title': _("The Variable Explorer"),
              'content': _("In this pane you can view and edit the variables "
                           "generated during the execution of a program, or "
                           "those entered directly in one of Spyder "
                           "consoles.<br><br>"
                           "As you can see, the Variable Explorer is showing "
                           "the variables generated during the last step of "
                           "this tour. By doing a double-click on any "
                           "of them, a new window will be opened, where you "
                           "can inspect and modify their contents."),
              'widgets': [sw.variable_explorer],
              'interact': True},

             {'title': _("Help"),
              'content': _("This pane displays documentation of the "
                           "functions, classes, methods or modules you are "
                           "currently using in the Editor or the Consoles.<br><br>"
                           "To use it, you need to press <b>Ctrl+I</b> in "
                           "front of an object. If that object has some "
                           "documentation associated with it, it will be "
                           "displayed here."),
              'widgets': [sw.help_plugin],
              'interact': True},

             {'title': _("The File Explorer"),
              'content': _("This pane lets you navigate through the directories "
                           "and files present in your computer.<br><br>"
                           "You can also open any of these files with its "
                           "corresponding application, by doing a double "
                           "click on it.<br><br>"
                           "There is one exception to this rule: plain-text "
                           "files will always be opened in the Spyder Editor."),
              'widgets': [sw.file_explorer],
              'interact': True},

             {'title': _("The History Log"),
              'content': _("This pane records all commands introduced in "
                           "the Python and IPython consoles."),
              'widgets': [sw.history_log],
              'interact': True},
             ]

    # TODO: leftover frames from a previous iteration of the tour; kept here
    # as a reference for widgets that could still be covered.
#    ['The run toolbar',
#     'Should be short',
#     ['self.run_toolbar'], None],
#    ['The debug toolbar',
#     '',
#     ['self.debug_toolbar'], None],
#    ['The main toolbar',
#     '',
#     ['self.main_toolbar'], None],
#    ['The editor',
#     'Spyder has differnet bla bla bla',
#     ['self.editor.dockwidget'], None],
#    ['The editor',
#     'Spyder has differnet bla bla bla',
#     ['self.outlineexplorer.dockwidget'], None],
#
#    ['The menu bar',
#     'Spyder has differnet bla bla bla',
#     ['self.menuBar()'], None],
#
#    ['The menu bar',
#     'Spyder has differnet bla bla bla',
#     ['self.statusBar()'], None],
#
#
#    ['The toolbars!',
#     'Spyder has differnet bla bla bla',
#     ['self.variableexplorer.dockwidget'], None],
#    ['The toolbars MO!',
#     'Spyder has differnet bla bla bla',
#     ['self.extconsole.dockwidget'], None],
#    ['The whole window?!',
#     'Spyder has differnet bla bla bla',
#     ['self'], None],
#    ['Lets try something!',
#     'Spyder has differnet bla bla bla',
#     ['self.extconsole.dockwidget',
#      'self.variableexplorer.dockwidget'], None]
#
#    ]

    # Stub frames for a "New features in 3.0" tour (content is placeholder
    # text, still to be written).
    feat30 = [{'title': "New features in Spyder 3.0",
               'content': _("<b>Spyder</b> is an interactive development "
                            "environment based on bla"),
               'image': 'spyder.png'},

              {'title': _("Welcome to Spyder introduction tour"),
               'content': _("Spyder is an interactive development environment "
                            "based on bla"),
               'widgets': ['variableexplorer']},
              ]

    # Registry of all selectable tours, in menu order.
    tours = [{'name': _('Introduction tour'), 'tour': intro},
             {'name': _('New features in version 3.0'), 'tour': feat30}]

    # NOTE(review): index == -1 returns [test], a bare list of frame dicts,
    # while any other index returns [{'name': ..., 'tour': ...}] — callers
    # using the test tour must handle the different shape.
    if index is None:
        return tours
    elif index == -1:
        return [test]
    else:
        return [tours[index]]
class FadingDialog(QDialog):
    """Frameless dialog able to animate its own opacity in and out.

    The fade is driven by a ``QPropertyAnimation`` acting on the
    ``opacity`` property of a ``QGraphicsOpacityEffect`` attached to the
    dialog.  Client code can register lists of callbacks to be executed
    right before/after each fade-in and fade-out.
    """
    # Emitted by subclasses when a key press should be handled by the tour.
    sig_key_pressed = Signal()

    def __init__(self, parent, opacity, duration, easing_curve):
        """
        Parameters
        ----------
        parent : QWidget
            Widget this dialog is attached to.
        opacity : sequence of float
            Bounds of the fade; the smallest value is used as the hidden
            opacity and the largest one as the fully visible opacity.
        duration : sequence of int
            Fade-in (first item) and fade-out (last item) durations, in
            milliseconds.
        easing_curve : sequence
            Easing curves for the fade-in (first item) and fade-out
            (last item) animations.
        """
        super(FadingDialog, self).__init__(parent)

        self.parent = parent
        self.opacity_min = min(opacity)
        self.opacity_max = max(opacity)
        self.duration_fadein = duration[0]
        self.duration_fadeout = duration[-1]
        self.easing_curve_in = easing_curve[0]
        self.easing_curve_out = easing_curve[-1]
        self.effect = None
        self.anim = None

        self._fade_running = False
        # Callback lists run around each fade; see the set_funcs_* setters.
        self._funcs_before_fade_in = []
        self._funcs_after_fade_in = []
        self._funcs_before_fade_out = []
        self._funcs_after_fade_out = []

        self.setModal(False)

    # --- private helpers
    def _run(self, funcs):
        """Invoke every callback in *funcs*, in order, with no arguments."""
        for callback in funcs:
            callback()

    def _run_before_fade_in(self):
        """Execute the callbacks registered for just before a fade-in."""
        self._run(self._funcs_before_fade_in)

    def _run_after_fade_in(self):
        """Execute the callbacks registered for just after a fade-in."""
        self._run(self._funcs_after_fade_in)

    def _run_before_fade_out(self):
        """Execute the callbacks registered for just before a fade-out."""
        self._run(self._funcs_before_fade_out)

    def _run_after_fade_out(self):
        """Execute the callbacks registered for just after a fade-out."""
        self._run(self._funcs_after_fade_out)

    def _set_fade_finished(self):
        """Mark the current fade as no longer running."""
        self._fade_running = False

    def _fade_setup(self):
        """Create a fresh opacity effect/animation pair for the next fade."""
        self._fade_running = True
        self.effect = QGraphicsOpacityEffect(self)
        self.setGraphicsEffect(self.effect)
        self.anim = QPropertyAnimation(self.effect, to_binary_string("opacity"))

    def _start_fade(self, curve, start, end, duration, on_finished_connect,
                    after_fade_func):
        """Configure the opacity animation for one fade and launch it.

        Connection order matters here: Qt invokes directly-connected slots
        in the order they were connected, so *on_finished_connect* runs
        first, then the running flag is cleared, then the after-fade
        callbacks fire.
        """
        self.anim.setEasingCurve(curve)
        self.anim.setStartValue(start)
        self.anim.setEndValue(end)
        self.anim.setDuration(duration)
        self.anim.finished.connect(on_finished_connect)
        self.anim.finished.connect(self._set_fade_finished)
        self.anim.finished.connect(after_fade_func)
        self.anim.start()

    # --- public api
    def fade_in(self, on_finished_connect):
        """Show the dialog and fade it in; *on_finished_connect* runs last."""
        self._run_before_fade_in()
        self._fade_setup()
        self.show()
        self.raise_()
        self._start_fade(self.easing_curve_in, self.opacity_min,
                         self.opacity_max, self.duration_fadein,
                         on_finished_connect, self._run_after_fade_in)

    def fade_out(self, on_finished_connect):
        """Fade the dialog out; *on_finished_connect* runs when done."""
        self._run_before_fade_out()
        self._fade_setup()
        self._start_fade(self.easing_curve_out, self.opacity_max,
                         self.opacity_min, self.duration_fadeout,
                         on_finished_connect, self._run_after_fade_out)

    def is_fade_running(self):
        """Return True while a fade animation is in progress."""
        return self._fade_running

    def set_funcs_before_fade_in(self, funcs):
        """Register the callbacks to run just before a fade-in."""
        self._funcs_before_fade_in = funcs

    def set_funcs_after_fade_in(self, funcs):
        """Register the callbacks to run just after a fade-in."""
        self._funcs_after_fade_in = funcs

    def set_funcs_before_fade_out(self, funcs):
        """Register the callbacks to run just before a fade-out."""
        self._funcs_before_fade_out = funcs

    def set_funcs_after_fade_out(self, funcs):
        """Register the callbacks to run just after a fade-out."""
        self._funcs_after_fade_out = funcs
class FadingCanvas(FadingDialog):
"""The black semi transparent canvas that covers the application"""
def __init__(self, parent, opacity, duration, easing_curve, color,
tour=None):
"""Create a black semi transparent canvas that covers the app."""
super(FadingCanvas, self).__init__(parent, opacity, duration,
easing_curve)
self.parent = parent
self.tour = tour
self.color = color # Canvas color
self.color_decoration = Qt.red # Decoration color
self.stroke_decoration = 2 # width in pixels for decoration
self.region_mask = None
self.region_subtract = None
self.region_decoration = None
self.widgets = None # The widget to uncover
self.decoration = None # The widget to draw decoration
self.interaction_on = False
self.path_current = None
self.path_subtract = None
self.path_full = None
self.path_decoration = None
# widget setup
self.setWindowFlags(Qt.Dialog | Qt.FramelessWindowHint)
self.setAttribute(Qt.WA_TranslucentBackground)
self.setAttribute(Qt.WA_TransparentForMouseEvents)
self.setModal(False)
self.setFocusPolicy(Qt.NoFocus)
self.set_funcs_before_fade_in([self.update_canvas])
self.set_funcs_after_fade_out([lambda: self.update_widgets(None),
lambda: self.update_decoration(None)])
def set_interaction(self, value):
""" """
self.interaction_on = value
def update_canvas(self):
""" """
w, h = self.parent.size().width(), self.parent.size().height()
self.path_full = QPainterPath()
self.path_subtract = QPainterPath()
self.path_decoration = QPainterPath()
self.region_mask = QRegion(0, 0, w, h)
self.path_full.addRect(0, 0, w, h)
# Add the path
if self.widgets is not None:
for widget in self.widgets:
temp_path = QPainterPath()
# if widget is not found... find more general way to handle
if widget is not None:
widget.raise_()
widget.show()
geo = widget.frameGeometry()
width, height = geo.width(), geo.height()
point = widget.mapTo(self.parent, QPoint(0, 0))
x, y = point.x(), point.y()
temp_path.addRect(QRectF(x, y, width, height))
temp_region = QRegion(x, y, width, height)
if self.interaction_on:
self.region_mask = self.region_mask.subtracted(temp_region)
self.path_subtract = self.path_subtract.united(temp_path)
self.path_current = self.path_full.subtracted(self.path_subtract)
else:
self.path_current = self.path_full
if self.decoration is not None:
for widget in self.decoration:
temp_path = QPainterPath()
widget.raise_()
widget.show()
geo = widget.frameGeometry()
width, height = geo.width(), geo.height()
point = widget.mapTo(self.parent, QPoint(0, 0))
x, y = point.x(), point.y()
temp_path.addRect(QRectF(x, y, width, height))
temp_region_1 = QRegion(x-1, y-1, width+2, height+2)
temp_region_2 = QRegion(x+1, y+1, width-2, height-2)
temp_region = temp_region_1.subtracted(temp_region_2)
if self.interaction_on:
self.region_mask = self.region_mask.united(temp_region)
self.path_decoration = self.path_decoration.united(temp_path)
else:
self.path_decoration.addRect(0, 0, 0, 0)
# Add a decoration stroke around widget
self.setMask(self.region_mask)
self.update()
self.repaint()
    def update_widgets(self, widgets):
        """Set the widgets to highlight (cut out of the dimmed overlay)."""
        self.widgets = widgets
    def update_decoration(self, widgets):
        """Set the widgets to outline with a decoration stroke."""
        self.decoration = widgets
    def paintEvent(self, event):
        """Override Qt method: fill the dimming overlay and stroke decorations."""
        painter = QPainter(self)
        painter.setRenderHint(QPainter.Antialiasing)
        # Decoration
        painter.fillPath(self.path_current, QBrush(self.color))
        painter.strokePath(self.path_decoration, QPen(self.color_decoration,
                                                      self.stroke_decoration))
        # decoration_fill = QColor(self.color_decoration)
        # decoration_fill.setAlphaF(0.25)
        # painter.fillPath(self.path_decoration, decoration_fill)
    def reject(self):
        """Override Qt method.

        Instead of closing the dialog on Escape, forward the key press to the
        tour (which decides whether to end itself).
        """
        if not self.is_fade_running():
            key = Qt.Key_Escape
            self.key_pressed = key
            self.sig_key_pressed.emit()
    def mousePressEvent(self, event):
        """Override Qt method: deliberately ignore clicks on the overlay."""
        pass
    def focusInEvent(self, event):
        """Override Qt method."""
        # To be used so tips do not appear outside spyder
        if self.hasFocus():
            self.tour.gain_focus()
    def focusOutEvent(self, event):
        """Override Qt method."""
        # To be used so tips do not appear outside spyder
        # Step 0 is skipped, presumably to avoid hiding on startup — TODO confirm
        if self.tour.step_current != 0:
            self.tour.lost_focus()
class FadingTipBox(FadingDialog):
    """Frameless fading pop-up that displays a single step of the tour.

    Holds the step title (combo box), content text, optional image, an
    optional "Run code" button, and home/previous/next/end/close navigation.
    """
    def __init__(self, parent, opacity, duration, easing_curve, tour=None,
                 color_top=None, color_back=None, combobox_background=None):
        super(FadingTipBox, self).__init__(parent, opacity, duration,
                                           easing_curve)
        self.holder = self.anim # needed for qt to work
        self.parent = parent
        self.tour = tour
        self.frames = None
        self.offset_shadow = 0
        self.fixed_width = 300
        self.key_pressed = None
        self.setAttribute(Qt.WA_TranslucentBackground)
        self.setWindowFlags(Qt.Dialog | Qt.FramelessWindowHint |
                            Qt.WindowStaysOnTopHint)
        self.setModal(False)
        # Widgets
        def toolbutton(icon):
            # Helper: flat (auto-raise) tool button with the given icon
            bt = QToolButton()
            bt.setAutoRaise(True)
            bt.setIcon(icon)
            return bt
        self.button_close = toolbutton(ima.icon("tour.close"))
        self.button_home = toolbutton(ima.icon("tour.home"))
        self.button_previous = toolbutton(ima.icon("tour.previous"))
        self.button_end = toolbutton(ima.icon("tour.end"))
        self.button_next = toolbutton(ima.icon("tour.next"))
        self.button_run = QPushButton(_('Run code'))
        self.button_disable = None
        self.button_current = QToolButton()
        self.label_image = QLabel()
        self.label_title = QLabel()
        self.combo_title = QComboBox()
        self.label_current = QLabel()
        self.label_content = QLabel()
        self.label_content.setMinimumWidth(self.fixed_width)
        self.label_content.setMaximumWidth(self.fixed_width)
        self.label_current.setAlignment(Qt.AlignCenter)
        self.label_content.setWordWrap(True)
        self.widgets = [self.label_content, self.label_title,
                        self.label_current, self.combo_title,
                        self.button_close, self.button_run, self.button_next,
                        self.button_previous, self.button_end,
                        self.button_home, self.button_current]
        arrow = get_image_path('hide.png')
        self.color_top = color_top
        self.color_back = color_back
        self.combobox_background = combobox_background
        self.stylesheet = '''QComboBox {{
                             padding-left: 5px;
                             background-color: {}
                             border-width: 0px;
                             border-radius: 0px;
                             min-height:20px;
                             max-height:20px;
                             }}
                             QComboBox::drop-down  {{
                             subcontrol-origin: padding;
                             subcontrol-position: top left;
                             border-width: 0px;
                             }}
                             QComboBox::down-arrow {{
                             image: url({});
                             }}
                             '''.format(self.combobox_background.name(), arrow)
        # Windows fix, slashes should be always in unix-style
        self.stylesheet = self.stylesheet.replace('\\', '/')
        self.setFocusPolicy(Qt.StrongFocus)
        for widget in self.widgets:
            widget.setFocusPolicy(Qt.NoFocus)
            widget.setStyleSheet(self.stylesheet)
        layout_top = QHBoxLayout()
        layout_top.addWidget(self.combo_title)
        layout_top.addStretch()
        layout_top.addWidget(self.button_close)
        layout_top.addSpacerItem(QSpacerItem(self.offset_shadow,
                                             self.offset_shadow))
        layout_content = QHBoxLayout()
        layout_content.addWidget(self.label_content)
        layout_content.addWidget(self.label_image)
        layout_content.addSpacerItem(QSpacerItem(5, 5))
        layout_run = QHBoxLayout()
        layout_run.addStretch()
        layout_run.addWidget(self.button_run)
        layout_run.addStretch()
        layout_run.addSpacerItem(QSpacerItem(self.offset_shadow,
                                             self.offset_shadow))
        layout_navigation = QHBoxLayout()
        layout_navigation.addWidget(self.button_home)
        layout_navigation.addWidget(self.button_previous)
        layout_navigation.addStretch()
        layout_navigation.addWidget(self.label_current)
        layout_navigation.addStretch()
        layout_navigation.addWidget(self.button_next)
        layout_navigation.addWidget(self.button_end)
        layout_navigation.addSpacerItem(QSpacerItem(self.offset_shadow,
                                                    self.offset_shadow))
        layout = QVBoxLayout()
        layout.addLayout(layout_top)
        layout.addStretch()
        layout.addSpacerItem(QSpacerItem(15, 15))
        layout.addLayout(layout_content)
        layout.addLayout(layout_run)
        layout.addStretch()
        layout.addSpacerItem(QSpacerItem(15, 15))
        layout.addLayout(layout_navigation)
        layout.addSpacerItem(QSpacerItem(self.offset_shadow,
                                         self.offset_shadow))
        layout.setSizeConstraint(QLayout.SetFixedSize)
        self.setLayout(layout)
        self.set_funcs_before_fade_in([self._disable_widgets])
        self.set_funcs_after_fade_in([self._enable_widgets, self.setFocus])
        self.set_funcs_before_fade_out([self._disable_widgets])
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        # signals and slots
        # These are defined every time by the AnimatedTour Class
    def _disable_widgets(self):
        """Disable every child widget (called around fade transitions)."""
        for widget in self.widgets:
            widget.setDisabled(True)
    def _enable_widgets(self):
        """Re-enable child widgets after a fade-in, honoring button_disable."""
        self.setWindowFlags(Qt.Dialog | Qt.FramelessWindowHint |
                            Qt.WindowStaysOnTopHint)
        for widget in self.widgets:
            widget.setDisabled(False)
        # button_disable is set by AnimatedTour._check_buttons at step limits
        if self.button_disable == 'previous':
            self.button_previous.setDisabled(True)
            self.button_home.setDisabled(True)
        elif self.button_disable == 'next':
            self.button_next.setDisabled(True)
            self.button_end.setDisabled(True)
    def set_data(self, title, content, current, image, run, frames=None,
                 step=None):
        """Fill the tip box with one step's title, content, image and state."""
        self.label_title.setText(title)
        self.combo_title.clear()
        self.combo_title.addItems(frames)
        self.combo_title.setCurrentIndex(step)
        # min_content_len = max([len(f) for f in frames])
        # self.combo_title.setMinimumContentsLength(min_content_len)
        # Fix and try to see how it looks with a combo box
        self.label_current.setText(current)
        self.button_current.setText(current)
        self.label_content.setText(content)
        self.image = image
        if image is None:
            # Collapse the image label to 1x1 when the step has no image
            self.label_image.setFixedHeight(1)
            self.label_image.setFixedWidth(1)
        else:
            extension = image.split('.')[-1]
            self.image = QPixmap(get_image_path(image), extension)
            self.label_image.setPixmap(self.image)
            self.label_image.setFixedSize(self.image.size())
        if run is None:
            self.button_run.setVisible(False)
        else:
            self.button_run.setDisabled(False)
            self.button_run.setVisible(True)
        # Refresh layout
        self.layout().activate()
    def set_pos(self, x, y):
        """Move the tip box to (x, y), rounding up to whole pixels."""
        self.x = ceil(x)
        self.y = ceil(y)
        self.move(QPoint(self.x, self.y))
    def build_paths(self):
        """(Re)build the painter paths for the box body and header band."""
        geo = self.geometry()
        radius = 0
        shadow = self.offset_shadow
        x0, y0 = geo.x(), geo.y()
        width, height = geo.width() - shadow, geo.height() - shadow
        left, top = 0, 0
        right, bottom = width, height
        self.round_rect_path = QPainterPath()
        self.round_rect_path.moveTo(right, top + radius)
        self.round_rect_path.arcTo(right-radius, top, radius, radius, 0.0,
                                   90.0)
        self.round_rect_path.lineTo(left+radius, top)
        self.round_rect_path.arcTo(left, top, radius, radius, 90.0, 90.0)
        self.round_rect_path.lineTo(left, bottom-radius)
        self.round_rect_path.arcTo(left, bottom-radius, radius, radius, 180.0,
                                   90.0)
        self.round_rect_path.lineTo(right-radius, bottom)
        self.round_rect_path.arcTo(right-radius, bottom-radius, radius, radius,
                                   270.0, 90.0)
        self.round_rect_path.closeSubpath()
        # Top path
        header = 36
        offset = 2
        left, top = offset, offset
        right = width - (offset)
        self.top_rect_path = QPainterPath()
        self.top_rect_path.lineTo(right, top + radius)
        self.top_rect_path.moveTo(right, top + radius)
        self.top_rect_path.arcTo(right-radius, top, radius, radius, 0.0, 90.0)
        self.top_rect_path.lineTo(left+radius, top)
        self.top_rect_path.arcTo(left, top, radius, radius, 90.0, 90.0)
        self.top_rect_path.lineTo(left, top + header)
        self.top_rect_path.lineTo(right, top + header)
    def paintEvent(self, event):
        """Override Qt method: paint body, header band and gray outline."""
        self.build_paths()
        painter = QPainter(self)
        painter.setRenderHint(QPainter.Antialiasing)
        painter.fillPath(self.round_rect_path, self.color_back)
        painter.fillPath(self.top_rect_path, self.color_top)
        painter.strokePath(self.round_rect_path, QPen(Qt.gray, 1))
        # TODO: Build the pointing arrow?
    def keyReleaseEvent(self, event):
        """Override Qt method: forward navigation keys to the tour."""
        key = event.key()
        self.key_pressed = key
        keys = [Qt.Key_Right, Qt.Key_Left, Qt.Key_Down, Qt.Key_Up,
                Qt.Key_Escape, Qt.Key_PageUp, Qt.Key_PageDown,
                Qt.Key_Home, Qt.Key_End, Qt.Key_Menu]
        if key in keys:
            if not self.is_fade_running():
                self.sig_key_pressed.emit()
    def mousePressEvent(self, event):
        """override Qt method"""
        # Raise the main application window on click
        self.parent.raise_()
        self.raise_()
        if event.button() == Qt.RightButton:
            pass
            # clicked_widget = self.childAt(event.x(), event.y())
            # if clicked_widget == self.label_current:
            # self.context_menu_requested(event)
    def focusOutEvent(self, event):
        """Override Qt method."""
        # To be used so tips do not appear outside spyder
        self.tour.lost_focus()
    def context_menu_requested(self, event):
        """Show a (stub) context menu for jumping to a step."""
        pos = QPoint(event.x(), event.y())
        menu = QMenu(self)
        actions = []
        action_title = create_action(self, _('Go to step: '), icon=QIcon())
        action_title.setDisabled(True)
        actions.append(action_title)
        # actions.append(create_action(self, _(': '), icon=QIcon()))
        add_actions(menu, actions)
        menu.popup(self.mapToGlobal(pos))
    def reject(self):
        """Qt method to handle escape key event"""
        # Forward Escape to the tour instead of closing the dialog directly
        if not self.is_fade_running():
            key = Qt.Key_Escape
            self.key_pressed = key
            self.sig_key_pressed.emit()
class AnimatedTour(QWidget):
    """Controller for an animated, step-by-step tour over the main window.

    Coordinates the dimming overlay (FadingCanvas) and the step pop-up
    (FadingTipBox), tracks the current step, and reacts to navigation
    buttons, keyboard keys and main-window move/resize events.
    """
    def __init__(self, parent):
        QWidget.__init__(self, parent)
        self.parent = parent
        # Variables to adjust
        self.duration_canvas = [666, 666]
        self.duration_tips = [333, 333]
        self.opacity_canvas = [0.0, 0.7]
        self.opacity_tips = [0.0, 1.0]
        self.color = Qt.black
        self.easing_curve = [QEasingCurve.Linear]
        self.current_step = 0
        self.step_current = 0
        self.steps = 0
        self.canvas = None
        self.tips = None
        self.frames = None
        self.spy_window = None
        self.widgets = None
        self.dockwidgets = None
        self.decoration = None
        self.run = None
        self.is_tour_set = False
        self.is_running = False
        # Widgets
        self.canvas = FadingCanvas(self.parent, self.opacity_canvas,
                                   self.duration_canvas, self.easing_curve,
                                   self.color, tour=self)
        self.tips = FadingTipBox(self.parent, self.opacity_tips,
                                 self.duration_tips, self.easing_curve,
                                 tour=self, color_top=MAIN_TOP_COLOR,
                                 color_back=MAIN_BG_COLOR,
                                 combobox_background=MAIN_TOP_COLOR)
        # Widgets setup
        # Needed to fix spyder-ide/spyder#2204.
        self.setAttribute(Qt.WA_TransparentForMouseEvents)
        # Signals and slots
        self.tips.button_next.clicked.connect(self.next_step)
        self.tips.button_previous.clicked.connect(self.previous_step)
        self.tips.button_close.clicked.connect(self.close_tour)
        self.tips.button_run.clicked.connect(self.run_code)
        self.tips.button_home.clicked.connect(self.first_step)
        self.tips.button_end.clicked.connect(self.last_step)
        self.tips.button_run.clicked.connect(
            lambda: self.tips.button_run.setDisabled(True))
        self.tips.combo_title.currentIndexChanged.connect(self.go_to_step)
        # Main window move or resize
        self.parent.sig_resized.connect(self._resized)
        self.parent.sig_moved.connect(self._moved)
        # To capture the arrow keys that allow moving the tour
        self.tips.sig_key_pressed.connect(self._key_pressed)
        # To control the focus of tour
        self.setting_data = False
        self.hidden = False
    def _resized(self, event):
        """Resize the canvas with the main window and refresh the step."""
        if self.is_running:
            size = event.size()
            self.canvas.setFixedSize(size)
            self.canvas.update_canvas()
            if self.is_tour_set:
                self._set_data()
    def _moved(self, event):
        """Move the canvas with the main window and refresh the step."""
        if self.is_running:
            pos = event.pos()
            self.canvas.move(QPoint(pos.x(), pos.y()))
            if self.is_tour_set:
                self._set_data()
    def _close_canvas(self):
        """Hide the tips box and fade the canvas out."""
        self.tips.hide()
        self.canvas.fade_out(self.canvas.hide)
    def _clear_canvas(self):
        """Reset the canvas to a fully-dimmed state before a transition."""
        # TODO: Add option to also make it white... might be useful?
        # Make canvas black before transitions
        self.canvas.update_widgets(None)
        self.canvas.update_decoration(None)
        self.canvas.update_canvas()
    def _move_step(self):
        """Load the current step's data, position the tip box and fade it in."""
        self._set_data()
        # Show/raise the widget so it is located first!
        widgets = self.dockwidgets
        if widgets is not None:
            widget = widgets[0]
            if widget is not None:
                widget.show()
                widget.raise_()
        self._locate_tip_box()
        # Change in canvas only after fadein finishes, for visual aesthetics
        self.tips.fade_in(self.canvas.update_canvas)
        self.tips.raise_()
    def _set_modal(self, value, widgets):
        """Toggle modality of the given widgets; only needed on Windows."""
        platform = sys.platform.lower()
        if 'linux' in platform:
            pass
        elif 'win' in platform:
            for widget in widgets:
                widget.setModal(value)
                widget.hide()
                widget.show()
        elif 'darwin' in platform:
            pass
        else:
            pass
    def _process_widgets(self, names, spy_window):
        """Resolve dotted attribute names on spy_window to widget instances.

        Returns (widgets, dockwidgets); a widget without a `dockwidget`
        attribute appears unchanged in the second list.
        """
        widgets = []
        dockwidgets = []
        for name in names:
            base = name.split('.')[0]
            temp = getattr(spy_window, base)
            # Check if it is the current editor
            if 'get_current_editor()' in name:
                temp = temp.get_current_editor()
                temp = getattr(temp, name.split('.')[-1])
            widgets.append(temp)
            # Check if it is a dockwidget and make the widget a dockwidget
            # If not return the same widget
            temp = getattr(temp, 'dockwidget', temp)
            dockwidgets.append(temp)
        return widgets, dockwidgets
    def _set_data(self):
        """Set data that is displayed in each step of the tour."""
        self.setting_data = True
        step, steps, frames = self.step_current, self.steps, self.frames
        current = '{0}/{1}'.format(step + 1, steps)
        frame = frames[step]
        combobox_frames = [u"{0}. {1}".format(i+1, f['title'])
                           for i, f in enumerate(frames)]
        title, content, image = '', '', None
        widgets, dockwidgets, decoration = None, None, None
        run = None
        # Check if entry exists in dic and act accordingly
        if 'title' in frame:
            title = frame['title']
        if 'content' in frame:
            content = frame['content']
        if 'widgets' in frame:
            widget_names = frames[step]['widgets']
            # Get the widgets based on their name
            widgets, dockwidgets = self._process_widgets(widget_names,
                                                         self.spy_window)
            self.widgets = widgets
            self.dockwidgets = dockwidgets
        if 'decoration' in frame:
            widget_names = frames[step]['decoration']
            deco, decoration = self._process_widgets(widget_names,
                                                     self.spy_window)
            self.decoration = decoration
        if 'image' in frame:
            image = frames[step]['image']
        if 'interact' in frame:
            self.canvas.set_interaction(frame['interact'])
            if frame['interact']:
                self._set_modal(False, [self.tips])
            else:
                self._set_modal(True, [self.tips])
        else:
            self.canvas.set_interaction(False)
            self._set_modal(True, [self.tips])
        if 'run' in frame:
            # Assume that the first widget is the console
            run = frame['run']
        self.run = run
        self.tips.set_data(title, content, current, image, run,
                           frames=combobox_frames, step=step)
        self._check_buttons()
        # Make canvas black when starting a new place of decoration
        self.canvas.update_widgets(dockwidgets)
        self.canvas.update_decoration(decoration)
        self.setting_data = False
    def _locate_tip_box(self):
        """Choose the best position for the tip box relative to the widget."""
        dockwidgets = self.dockwidgets
        # Store the dimensions of the main window
        geo = self.parent.frameGeometry()
        x, y, width, height = geo.x(), geo.y(), geo.width(), geo.height()
        self.width_main = width
        self.height_main = height
        self.x_main = x
        self.y_main = y
        delta = 20
        # Here is the tricky part to define the best position for the
        # tip widget
        if dockwidgets is not None:
            if dockwidgets[0] is not None:
                geo = dockwidgets[0].geometry()
                x, y, width, height = geo.x(), geo.y(), geo.width(), geo.height()
                point = dockwidgets[0].mapToGlobal(QPoint(0, 0))
                x_glob, y_glob = point.x(), point.y()
                # Check if is too tall and put to the side
                y_fac = (height / self.height_main) * 100
                if y_fac > 60:  # FIXME:
                    if x < self.tips.width():
                        x = x_glob + width + delta
                        y = y_glob + height/2 - self.tips.height()/2
                    else:
                        x = x_glob - self.tips.width() - delta
                        y = y_glob + height/2 - self.tips.height()/2
                else:
                    if y < self.tips.height():
                        x = x_glob + width/2 - self.tips.width()/2
                        y = y_glob + height + delta
                    else:
                        x = x_glob + width/2 - self.tips.width()/2
                        y = y_glob - delta - self.tips.height()
        else:
            # Center on parent
            x = self.x_main + self.width_main/2 - self.tips.width()/2
            y = self.y_main + self.height_main/2 - self.tips.height()/2
        self.tips.set_pos(x, y)
    def _check_buttons(self):
        """Disable previous/home at the first step, next/end at the last."""
        step, steps = self.step_current, self.steps
        self.tips.button_disable = None
        if step == 0:
            self.tips.button_disable = 'previous'
        if step == steps - 1:
            self.tips.button_disable = 'next'
    def _key_pressed(self):
        """Dispatch the key captured by the tip box to a navigation action."""
        key = self.tips.key_pressed
        if ((key == Qt.Key_Right or key == Qt.Key_Down or
                key == Qt.Key_PageDown) and self.step_current != self.steps - 1):
            self.next_step()
        elif ((key == Qt.Key_Left or key == Qt.Key_Up or
                key == Qt.Key_PageUp) and self.step_current != 0):
            self.previous_step()
        elif key == Qt.Key_Escape:
            self.close_tour()
        elif key == Qt.Key_Home and self.step_current != 0:
            self.first_step()
        elif key == Qt.Key_End and self.step_current != self.steps - 1:
            self.last_step()
        elif key == Qt.Key_Menu:
            pos = self.tips.label_current.pos()
            # NOTE(review): context_menu_requested reads event.x()/event.y(),
            # but a QPoint is passed here — looks like a latent bug; confirm.
            self.tips.context_menu_requested(pos)
    def _hiding(self):
        """Mark the tour hidden and hide the tip box (fade-out callback)."""
        self.hidden = True
        self.tips.hide()
    # --- public api
    def run_code(self):
        """Execute the current step's code lines in the first widget."""
        codelines = self.run
        console = self.widgets[0]
        for codeline in codelines:
            console.execute_code(codeline)
    def set_tour(self, index, frames, spy_window):
        """Load a tour definition: its frames and the last shown frame."""
        self.spy_window = spy_window
        self.active_tour_index = index
        self.last_frame_active = frames['last']
        self.frames = frames['tour']
        self.steps = len(self.frames)
        self.is_tour_set = True
    def start_tour(self):
        """Show the canvas over the main window and fade in the first step."""
        geo = self.parent.geometry()
        x, y, width, height = geo.x(), geo.y(), geo.width(), geo.height()
        # self.parent_x = x
        # self.parent_y = y
        # self.parent_w = width
        # self.parent_h = height
        # FIXME: reset step to last used value
        # Reset step to beginning
        self.step_current = self.last_frame_active
        # Adjust the canvas size to match the main window size
        self.canvas.setFixedSize(width, height)
        self.canvas.move(QPoint(x, y))
        self.canvas.fade_in(self._move_step)
        self._clear_canvas()
        self.is_running = True
    def close_tour(self):
        """Fade everything out and remember the last step that was shown."""
        self.tips.fade_out(self._close_canvas)
        self.canvas.set_interaction(False)
        self._set_modal(True, [self.tips])
        self.canvas.hide()
        try:
            # Set the last played frame by updating the available tours in
            # parent. This info will be lost on restart.
            self.parent.tours_available[self.active_tour_index]['last'] =\
                self.step_current
        except (AttributeError, IndexError, KeyError, TypeError):
            # Best effort only: the parent may not track available tours.
            # Narrowed from a bare `except:` so that SystemExit and
            # KeyboardInterrupt are no longer silently swallowed.
            pass
        self.is_running = False
    def hide_tips(self):
        """Hide tips dialog when the main window loses focus."""
        self._clear_canvas()
        self.tips.fade_out(self._hiding)
    def unhide_tips(self):
        """Unhide tips dialog when the main window regains focus."""
        self._clear_canvas()
        self._move_step()
        self.hidden = False
    def next_step(self):
        """Advance to the next step of the tour."""
        self._clear_canvas()
        self.step_current += 1
        self.tips.fade_out(self._move_step)
    def previous_step(self):
        """Go back to the previous step of the tour."""
        self._clear_canvas()
        self.step_current -= 1
        self.tips.fade_out(self._move_step)
    def go_to_step(self, number, id_=None):
        """Jump directly to the given step index."""
        self._clear_canvas()
        self.step_current = number
        self.tips.fade_out(self._move_step)
    def last_step(self):
        """Jump to the last step of the tour."""
        self.go_to_step(self.steps - 1)
    def first_step(self):
        """Jump to the first step of the tour."""
        self.go_to_step(0)
    def lost_focus(self):
        """Confirm if the tour loses focus and hides the tips."""
        if (self.is_running and not self.any_has_focus() and
                not self.setting_data and not self.hidden):
            self.hide_tips()
    def gain_focus(self):
        """Confirm if the tour regains focus and unhides the tips."""
        if (self.is_running and self.any_has_focus() and
                not self.setting_data and self.hidden):
            self.unhide_tips()
    def any_has_focus(self):
        """Returns if tour or any of its components has focus."""
        f = (self.hasFocus() or self.parent.hasFocus() or
             self.tips.hasFocus() or self.canvas.hasFocus())
        return f
# ----------------------------------------------------------------------------
# Used for testing the functionality
class TourTestWindow(QMainWindow):
    """Minimal main window used to exercise the tour manually."""
    sig_resized = Signal("QResizeEvent")
    sig_moved = Signal("QMoveEvent")
    def __init__(self):
        super(TourTestWindow, self).__init__()
        self.setGeometry(300, 100, 400, 600)
        self.setWindowTitle('Exploring QMainWindow')
        self.exit = QAction('Exit', self)
        self.exit.setStatusTip('Exit program')
        # create the menu bar
        menubar = self.menuBar()
        file_ = menubar.addMenu('&File')
        file_.addAction(self.exit)
        # create the status bar
        self.statusBar()
        # QWidget or its instance needed for box layout
        self.widget = QWidget(self)
        self.button = QPushButton('test')
        self.button1 = QPushButton('1')
        self.button2 = QPushButton('2')
        effect = QGraphicsOpacityEffect(self.button2)
        self.button2.setGraphicsEffect(effect)
        self.anim = QPropertyAnimation(effect, to_binary_string("opacity"))
        self.anim.setStartValue(0.01)
        self.anim.setEndValue(1.0)
        self.anim.setDuration(500)
        lay = QVBoxLayout()
        lay.addWidget(self.button)
        lay.addStretch()
        lay.addWidget(self.button1)
        lay.addWidget(self.button2)
        self.widget.setLayout(lay)
        self.setCentralWidget(self.widget)
        self.button.clicked.connect(self.action1)
        self.button1.clicked.connect(self.action2)
        self.tour = AnimatedTour(self)
    def action1(self):
        """Start the 'test' tour from its first step."""
        frames = get_tour('test')
        index = 0
        dic = {'last': 0, 'tour': frames}
        self.tour.set_tour(index, dic, self)
        self.tour.start_tour()
    def action2(self):
        """Play the opacity animation on button2."""
        self.anim.start()
    def resizeEvent(self, event):
        """Reimplement Qt method"""
        QMainWindow.resizeEvent(self, event)
        self.sig_resized.emit(event)
    def moveEvent(self, event):
        """Reimplement Qt method"""
        QMainWindow.moveEvent(self, event)
        self.sig_moved.emit(event)
def test():
    """Run the tour test window inside its own Qt application loop."""
    app = QApplication([])
    win = TourTestWindow()
    win.show()
    app.exec_()
# Allow running this module directly for a quick manual test of the tour.
if __name__ == '__main__':
    test()
| [
"33146580+celkansimay@users.noreply.github.com"
] | 33146580+celkansimay@users.noreply.github.com |
fde93ece31ff06beaef52315467716f6064342e0 | 83efd4e51e7b8fabaa51cdd096fa7d863e83ab09 | /utils/track_utils.py | f1aaf0fcd7de1141ac18c7dadf2041d32bc12e2a | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | pbarsic/handtracking | 533bb9f9839e858f775ba10ea6edddbb2c12e132 | 9c5e59b0f706693a9fde7bb858aea1075daa4311 | refs/heads/master | 2020-08-24T21:49:48.730218 | 2019-10-23T17:21:50 | 2019-10-23T17:21:50 | 216,913,014 | 0 | 0 | Apache-2.0 | 2019-10-22T21:29:13 | 2019-10-22T21:29:13 | null | UTF-8 | Python | false | false | 614 | py | def group_detections(detection_box, detection_score, detection_category):
w=[]
for det in zip(detection_box, detection_score, detection_category):
m = [ a for a in det[0] ]
m.append(det[1])
m.append(det[2])
w.append(m)
return w
def group_detections_threshold(detection_box, detection_score, detection_category, threshold):
    """Merge per-detection box, score and category into flat lists,
    keeping only detections whose score is strictly above ``threshold``.

    Parameters
    ----------
    detection_box : iterable of iterables
        Box coordinates for each detection.
    detection_score : iterable of numbers
        Confidence score for each detection.
    detection_category : iterable
        Category/class for each detection.
    threshold : number
        Exclusive lower bound on the score.

    Returns
    -------
    list of lists
        One ``[*box, score, category]`` list per kept detection.
    """
    # Comprehension replaces the manual append loop (same output order).
    return [list(box) + [score, category]
            for box, score, category in zip(detection_box, detection_score,
                                            detection_category)
            if score > threshold]
| [
"pbarsic@gmail.com"
] | pbarsic@gmail.com |
fe918c512b07df47961368267c73f7c7f365e116 | 39875dd75edf170fd8f5ab7841a73ef50eaaca6f | /tests/utils.py | c6206de3606ae956ba3f13b63da84cdebee632fc | [] | no_license | Anmol1696/Thumbnailify | a224a0997051ec77eb2fd3ced26e055ddc63133c | c194d879320483b8ec173d6acf15580b28374c33 | refs/heads/master | 2021-06-20T00:25:29.862974 | 2019-08-28T16:40:44 | 2019-08-28T16:40:44 | 202,716,394 | 1 | 1 | null | 2021-03-25T22:55:52 | 2019-08-16T11:32:22 | Python | UTF-8 | Python | false | false | 351 | py | import configparser
import base64
from pathlib import Path
def load_config():
    """Read every *.ini file that sits next to this module into one parser.

    Returns the populated ConfigParser; missing files are silently skipped
    by ConfigParser.read.
    """
    parser = configparser.ConfigParser(allow_no_value=True)
    ini_files = Path(__file__).parent.glob("*.ini")
    parser.read(ini_files)
    return parser
def file_encoder(filename):
    """Return the base64-encoded contents of *filename* as bytes."""
    contents = Path(filename).read_bytes()
    return base64.b64encode(contents)
| [
"anmol.yadav@rakuten.com"
] | anmol.yadav@rakuten.com |
b7d684c9b7991cd9e351025c9f0d7d12a0fae18f | ef0765f57796d397f67cb175707aa9eb56624500 | /train.py | 2154fdfff048e2a18e46c7968e53e6ea2b2d64f4 | [
"MIT"
] | permissive | X-CCS/Tacotron2-LPCNet | e3554a74c0c2f6fbd0ad4eb8bd2078998ecf4653 | de2ee5ef313ca109d8ccf4a13bf5adebd4ee42f6 | refs/heads/master | 2022-12-05T20:08:05.491707 | 2020-09-02T09:18:21 | 2020-09-02T09:18:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,686 | py | import argparse
import tensorflow as tf
from tacotron.train import tacotron_train
from wavenet_vocoder.train import wavenet_train
from tacotron.synthesize import tacotron_synthesize
from infolog import log
from hparams import hparams
import os
import infolog
from time import sleep
log = infolog.log
def save_seq(file, sequence, input_path):
	'''Save Tacotron-2 training state to disk. (To skip for future runs)
	'''
	# Serialize each stage flag as "0"/"1", then append the input path,
	# producing a single pipe-separated line, e.g. "1|0|0|path/to/map.txt".
	fields = [str(int(flag)) for flag in sequence]
	fields.append(input_path)
	state_line = '|'.join(fields)
	with open(file, 'w') as f:
		f.write(state_line)
def read_seq(file):
	'''Load Tacotron-2 training state from disk. (To skip if not first run)
	'''
	# First run: no state file yet -> nothing completed, empty input path.
	if not os.path.isfile(file):
		return [0, 0, 0], ''
	with open(file, 'r') as f:
		fields = f.read().split('|')
	# All fields but the last are stage flags; the last is the input path.
	flags = [bool(int(value)) for value in fields[:-1]]
	return flags, fields[-1]
def prepare_run(args):
	"""Apply hyperparameter overrides and set up this run's log directory.

	Creates `logs-<name or model>` under args.base_dir, initializes the
	terminal logger there, and returns (log_dir, modified_hparams).
	"""
	modified_hp = hparams.parse(args.hparams)
	os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(args.tf_log_level)
	run_name = args.name or args.model
	log_dir = os.path.join(args.base_dir, 'logs-{}'.format(run_name))
	os.makedirs(log_dir, exist_ok=True)
	infolog.init(os.path.join(log_dir, 'Terminal_train_log'), run_name)
	return log_dir, modified_hp
def train(args, log_dir, hparams):
	"""Run the full Tacotron-2 pipeline, resuming from the on-disk state log.

	Stages: Tacotron training -> GTA synthesis -> WaveNet training. Each
	completed stage is recorded in `<log_dir>/state_log` so interrupted runs
	skip finished stages.
	"""
	state_file = os.path.join(log_dir, 'state_log')
	# Get training states (which stages completed on a previous run)
	(taco_state, GTA_state, wave_state), input_path = read_seq(state_file)
	if not taco_state:
		log('\n#############################################################\n')
		log('Tacotron Train\n')
		log('###########################################################\n')
		checkpoint = tacotron_train(args, log_dir, hparams)
		tf.reset_default_graph()
		# Sleep 1 second to let previous graph close and avoid error messages while synthesis
		sleep(1)
		if checkpoint is None:
			# FIX: `raise('...')` raised a TypeError (exceptions must derive
			# from BaseException); raise a proper exception instead.
			raise RuntimeError('Error occurred while training Tacotron, Exiting!')
		taco_state = 1
		save_seq(state_file, [taco_state, GTA_state, wave_state], input_path)
	if not GTA_state:
		log('\n#############################################################\n')
		log('Tacotron GTA Synthesis\n')
		log('###########################################################\n')
		# NOTE(review): `checkpoint` is only defined when the Tacotron stage
		# ran in this process; resuming directly at the GTA stage would raise
		# NameError — TODO confirm intended resume behavior.
		input_path = tacotron_synthesize(args, hparams, checkpoint)
		GTA_state = 1
		save_seq(state_file, [taco_state, GTA_state, wave_state], input_path)
	if input_path == '' or input_path is None:
		raise RuntimeError('input_path has an unpleasant value -> {}'.format(input_path))
	if not wave_state:
		log('\n#############################################################\n')
		log('Wavenet Train\n')
		log('###########################################################\n')
		checkpoint = wavenet_train(args, log_dir, hparams, input_path)
		if checkpoint is None:
			# FIX: same string-raise bug as above.
			raise RuntimeError('Error occurred while training Wavenet, Exiting!')
		wave_state = 1
		save_seq(state_file, [taco_state, GTA_state, wave_state], input_path)
	if wave_state and GTA_state and taco_state:
		log('TRAINING IS ALREADY COMPLETE!!')
def main():
	"""Parse CLI arguments and dispatch to the requested training pipeline
	(Tacotron only, WaveNet only, or the full Tacotron-2 chain)."""
	parser = argparse.ArgumentParser()
	parser.add_argument('--base_dir', default='')
	parser.add_argument('--hparams', default='',
		help='Hyperparameter overrides as a comma-separated list of name=value pairs')
	parser.add_argument('--tacotron_input', default='training_data/train.txt')
	parser.add_argument('--wavenet_input', default='tacotron_output/gta/map.txt')
	parser.add_argument('--name', help='Name of logging directory.')
	parser.add_argument('--model', default='Tacotron-2')
	parser.add_argument('--input_dir', default='training_data/', help='folder to contain inputs sentences/targets')
	parser.add_argument('--output_dir', default='output/', help='folder to contain synthesized mel spectrograms')
	parser.add_argument('--mode', default='synthesis', help='mode for synthesis of tacotron after training')
	parser.add_argument('--GTA', default='True', help='Ground truth aligned synthesis, defaults to True, only considered in Tacotron synthesis mode')
	# NOTE(review): type=bool makes any non-empty string truthy, so
	# `--restore False` still restores — TODO confirm intended.
	parser.add_argument('--restore', type=bool, default=True, help='Set this to False to do a fresh training')
	parser.add_argument('--summary_interval', type=int, default=250,
		help='Steps between running summary ops')
	parser.add_argument('--checkpoint_interval', type=int, default=1000,
		help='Steps between writing checkpoints')
	parser.add_argument('--eval_interval', type=int, default=1000,
		help='Steps between eval on test data')
	parser.add_argument('--tacotron_train_steps', type=int, default=500000, help='total number of tacotron training steps')
	parser.add_argument('--wavenet_train_steps', type=int, default=360000, help='total number of wavenet training steps')
	parser.add_argument('--tf_log_level', type=int, default=1, help='Tensorflow C++ log level.')
	args = parser.parse_args()
	accepted_models = ['Tacotron', 'WaveNet', 'Both', 'Tacotron-2']
	if args.model not in accepted_models:
		raise ValueError('please enter a valid model to train: {}'.format(accepted_models))
	log_dir, hparams = prepare_run(args)
	# Dispatch to the selected pipeline
	if args.model == 'Tacotron':
		tacotron_train(args, log_dir, hparams)
	elif args.model == 'WaveNet':
		wavenet_train(args, log_dir, hparams, args.wavenet_input)
	elif args.model in ('Both', 'Tacotron-2'):
		train(args, log_dir, hparams)
	else:
		raise ValueError('Model provided {} unknown! {}'.format(args.model, accepted_models))
# Script entry point.
if __name__ == '__main__':
	main()
| [
"wangjiaqi12807@autohome.com.cn"
] | wangjiaqi12807@autohome.com.cn |
ecd88c4f72cff8b982179e05bb5a023c1dd7b9a8 | f49a5c06f858d8e9ca1d5485e3d68c722b2839df | /benchmarking_main.py | 8a6de5e17ac0c58e5b8cce6e3fa23282988ca14f | [
"CC-BY-4.0"
] | permissive | asd974512/breast-cancer-sub-types | ec8c9aa35f996aba0b6c095c8e853bc01e6315a0 | 8f6c9af9d66cdcf8260393ae737bfb54678b0032 | refs/heads/master | 2023-09-05T00:32:23.069810 | 2021-11-22T10:58:18 | 2021-11-22T10:58:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,711 | py |
############################# IMPORT LIBRARY #################################
import timeit
start_time = timeit.default_timer()
seed=75
import psutil
import os
import numpy as np
from tensorflow import set_random_seed
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
from scipy import interp
from itertools import cycle
from xgboost import XGBClassifier
from collections import Counter
from sklearn.metrics import average_precision_score, precision_recall_curve, matthews_corrcoef, precision_score, recall_score, confusion_matrix, classification_report
from sklearn.metrics import f1_score, roc_auc_score, auc, cohen_kappa_score, precision_recall_curve, log_loss, roc_curve, classification_report, accuracy_score
from sklearn.model_selection import GridSearchCV, LeaveOneOut, cross_val_score, cross_val_predict, StratifiedKFold, StratifiedShuffleSplit
from sklearn.metrics.classification import accuracy_score
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis, QuadraticDiscriminantAnalysis
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn import metrics
from sklearn.ensemble import RandomTreesEmbedding, RandomForestClassifier, GradientBoostingClassifier, VotingClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC, LinearSVC
from sklearn.neural_network import MLPClassifier, BernoulliRBM
from sklearn import model_selection
from sklearn.decomposition import PCA, KernelPCA, SparsePCA, MiniBatchSparsePCA, NMF, TruncatedSVD, FastICA, FactorAnalysis, LatentDirichletAllocation
from sklearn.neighbors import KNeighborsClassifier
from sklearn.datasets import make_classification
from sklearn.multiclass import OneVsRestClassifier, OneVsOneClassifier
from sklearn.preprocessing import Normalizer, MinMaxScaler, StandardScaler, RobustScaler, LabelEncoder, label_binarize, QuantileTransformer
from sklearn.manifold import TSNE
from sklearn.feature_selection import SelectFromModel, SelectKBest, chi2, RFE, RFECV
from sklearn.pipeline import make_pipeline
from imblearn.over_sampling import RandomOverSampler, ADASYN, SMOTE
from imblearn.combine import SMOTEENN, SMOTETomek
from keras.initializers import RandomNormal
from keras.wrappers.scikit_learn import KerasClassifier
from keras.layers import Input, Dense
from keras.models import Model, load_model
from keras_adversarial import AdversarialOptimizerSimultaneous, normal_latent_sampling
from variational_autoencoder import *
from variational_autoencoder_multilayer import *
from aae_architechture import *
from deep_autoencoder import *
from denoising_autoencoder import *
from deep_denoising_autoencoder import *
from shallow_autoencoder import *
matplotlib.use('Agg')  # headless backend: figures go to files, no display required
np.random.seed(seed)  # NOTE(review): `seed` is presumably defined earlier in the file — confirm
####################### LOAD BREAST CANCER DATASET #######################
# Expression matrix is stored genes-as-rows / samples-as-columns, so it is
# transposed below to the usual (n_samples, n_features) layout for sklearn.
file_1 = pd.read_csv('./data/subtype_molecular_rna_seq.csv')
data = file_1.iloc[0:20439,2:607].values
X=data.T
file_2 = pd.read_csv('./data/subtype_molecular_rna_seq_label.csv', low_memory=False)
label= file_2.iloc[0,2:607].values
y=label.T
print('Actual dataset shape {}'.format(Counter(y)))
############################ LOAD UCEC DATA ###########################
'''
file_1 = pd.read_csv('./data/ucec_rna_seq.csv')
data = file_1.iloc[0:20482,2:232].values
X=data.T
file_2 = pd.read_csv('./data/ucec_rna_seq_label.csv', low_memory=False)
label = file_2.iloc[0,2:232].values #First row then column from dataset
y=label.T
print('Actual dataset shape {}'.format(Counter(y)))
'''
# Counters used to number autoencoder output folders / figures across CV folds.
count=0
aaecount=0
figure=0
# Candidate classifiers; each one is re-fitted and scored on every CV fold.
classifiers = [
    KNeighborsClassifier(3),
    DecisionTreeClassifier(random_state=seed),
    RandomForestClassifier(criterion='entropy', n_estimators=100, random_state=seed),
    XGBClassifier(learning_rate=0.001,max_depth=4,n_estimators=100, nthread=1, subsample=0.65),
    GradientBoostingClassifier(random_state=seed),
    GaussianNB(),
    LinearDiscriminantAnalysis(),
    QuadraticDiscriminantAnalysis(),
    SVC(kernel='rbf', probability=True, random_state=seed),
    LogisticRegression(C=0.1, multi_class= 'multinomial', solver='sag', random_state=seed),
    MLPClassifier(hidden_layer_sizes=(500), random_state=seed, verbose=True, activation='tanh', solver='adam', alpha=0.0001, batch_size='auto'),
    VotingClassifier(estimators=[('MLP', MLPClassifier(hidden_layer_sizes=(500), random_state=seed, verbose=True, activation='tanh', solver='adam', alpha=0.0001, batch_size='auto')),
                                 ('LDA', LinearDiscriminantAnalysis()),
                                 ('LR', LogisticRegression(C=0.1, multi_class= 'multinomial', solver='sag', random_state=seed))], voting='soft')
    ]
# One result row per (classifier, fold); rows are averaged per classifier at the end.
log_cols=["Classifier", "Accuracy", "F1-Score", "Recall", "Precision", "AUC", "MCC", "Kappa", "Log-Loss"]
log = pd.DataFrame(columns=log_cols)
def zero_mix(x, n):
    """Return a copy of ``x`` with a random fraction of each row set to zero.

    ``n`` is a noise-spec string of the form ``'spilt-<frac>'`` (sic, the
    marker really is spelled 'spilt'); when the marker is absent the data is
    returned unchanged (still as a copy).  Used to corrupt the inputs when
    training a denoising autoencoder.
    """
    corrupted = np.copy(x)
    spec = n
    if 'spilt' in spec:
        # 'spilt-0.05' -> 0.05 : fraction of entries to silence per row
        fraction = float(spec.split('-')[1])
        for row in corrupted:
            how_many = int(round(fraction * len(row)))
            # pick distinct column indices and zero them in place (row is a view)
            picked = np.random.choice(len(row), how_many, replace=False)
            row[picked] = 0.0
    return corrupted
def gaussian_mix(x):
    """Return ``x`` plus zero-mean Gaussian noise (sigma = 0.1), same shape.

    Used to corrupt the inputs when training a denoising autoencoder.
    """
    rows, cols = len(x), len(x[0])
    noise = np.random.normal(0, 0.1, (rows, cols))
    return x + noise
# The above two functions are used to add noise in the data
# And used to train denoising autoencoder
# Stratified 5-fold CV: class proportions are preserved in every train/test split.
skf = StratifiedKFold(n_splits=5, shuffle=False, random_state=seed)
skf.get_n_splits(X, y)
print(skf)
for train_index, test_index in skf.split(X, y):
    #print("TRAIN:", train_index, "TEST:", test_index)
    x_train, x_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]
    #print("TRAIN:", x_train, "TEST:", x_test)
    print('Dataset shape for Train {}'.format(Counter(y_train)))
    print('Dataset shape for Test {}'.format(Counter(y_test)))
    ################################# OVER SAMPLING ###############################
    # Only the training fold is oversampled (borderline-SMOTE); the test fold
    # keeps the original class imbalance so evaluation stays honest.
    # NOTE(review): `kind=` and `fit_sample` belong to the old imbalanced-learn
    # API — confirm the pinned imblearn version before upgrading.
    sm = SMOTE(sampling_strategy='auto', kind='borderline1', random_state=seed)
    x_train, y_train = sm.fit_sample(x_train, y_train)
    #oversample only traning data
    '''
    sm = SMOTEENN(ratio=1, random_state=seed)
    x_train, y_train = sm.fit_sample(x_train, y_train)
    '''
    '''
    sm = SMOTETomek(ratio=1, random_state=seed)
    x_train, y_train = sm.fit_sample(x_train, y_train)
    '''
    '''
    sm=RandomOverSampler(ratio=1, random_state=seed)
    x_train, y_train = sm.fit_sample(x_train, y_train)
    '''
    '''
    sm=ADASYN(ratio=1, random_state=seed, n_neighbors=5, n_jobs=1)
    x_train, y_train = sm.fit_sample(x_train, y_train)
    '''
    print('Resampled dataset shape for Train {}'.format(Counter(y_train)))
    print('Resampled dataset shape for Test {}'.format(Counter(y_test)))
    n_samples, n_features = x_train.shape
############################# FEATURE SCALING/NORMALIZATION ##################
qt = QuantileTransformer(n_quantiles=10, random_state=seed)
qt.fit(x_train)
x_train=qt.transform(x_train)
x_test=qt.transform(x_test)
#use this when train denoising autoencoder
#use either gaussian mix or zero mix
#x_train_noisy=zero_mix(x_train, 'spilt-0.05')
#x_test_noisy=zero_mix(x_test, 'spilt-0.05')
#x_train_noisy=gaussian_mix(x_train)
#x_test_noisy=gaussian_mix(x_test)
'''
# Standart Scaling
sc = StandardScaler()
sc.fit(x_train)
x_train=sc.transform(x_train)
x_test=sc.transform(x_test)
'''
###############################DIMENSION REDUCTION ############################
pca = PCA(n_components=50, random_state=seed)
pca.fit(x_train, y_train)
x_train = pca.transform(x_train)
x_test = pca.transform(x_test)
print ('After PCA', x_train.shape)
################ VARIOUS AUTOENCODERS ###############
aaecount= aaecount+1
aaenum=str(aaecount)
######### Shallow Autoencoder ############
'''
shallow_autoencoder_fit(x_train, x_test, encoding_dim=50, optimizer="adadelta",
loss_function="binary_crossentropy", nb_epoch=100,
batch_size=20, path='./feature_extraction/shallowAE/'+aaenum+'/')
#do not require fine tuning since this autoencoder does not have any hidden layer
shallow_autoencoder = load_model('./feature_extraction/shallowAE/'+aaenum+'/shallow_encoder'+'.h5')
x_train = shallow_autoencoder.predict(x_train)
print('X_Train Shape after ShallowAE :', x_train.shape)
x_test = shallow_autoencoder.predict(x_test)
print('X_Test Shape after ShallowAE :', x_train.shape)
'''
######### Denoising Autoencoder ############
#deep_denoising_autoencoder_fit
'''
deep_denoising_autoencoder_fit(x_train, x_test, x_train_noisy, x_test_noisy, encoding_dim=50, optimizer="adadelta",
loss_function="binary_crossentropy", nb_epoch=100,
batch_size=20, path='./feature_extraction/denoisingAE/'+aaenum+'/')
'''
'''
#do not require fine tuning since this autoencoder does not have any hidden layer
denoising_autoencoder = load_model('./feature_extraction/denoisingAE/'+aaenum+'/denoising_encoder'+'.h5')
x_train = denoising_autoencoder.predict(x_train)
print('X_Train Shape after ShallowAE :', x_train.shape)
x_test = denoising_autoencoder.predict(x_test)
print('X_Test Shape after ShallowAE :', x_train.shape)
'''
######### Deep Autoencoder ##########
'''
deep_autoencoder_fit(x_train, x_test, encoding_dim=50, optimizer="adadelta",
loss_function="binary_crossentropy", nb_epoch=100,
batch_size=20, path='./feature_extraction/deepAE/'+aaenum+'/')
####### don't need to use the following lines if autoencoder require fine tuning
deep_encoder = load_model('./feature_extraction/DeepAE/'+aaenum+'/deep_autoencoder'+'.h5')
x_train = deep_encoder.predict(x_train)
print('X_Train Shape after DeepAE :', x_train.shape)
x_test = deep_encoder.predict(x_test)
print('X_Test Shape after DeepAE :', x_test.shape)
'''
############## AAE ##############
'''
aae_model('./feature_extraction/AAE/'+aaenum+'/', AdversarialOptimizerSimultaneous(),
xtrain=x_train,ytrain=y_train, xtest=x_test, ytest=y_train, encoded_dim=50,img_dim=x_train.shape[1], nb_epoch=100)
'''
'''
####### don't need to use the following lines if autoencoder require fine tuning
model = load_model('./feature_extraction/AAE/'+aaenum+'/aae_encoder'+'.h5')
x_train = model.predict(x_train)
print('X_Train Shape after AAE :', x_train.shape)
x_test = model.predict(x_test)
print('X_Test Shape after AAE :', x_test.shape)
'''
################ Variational Autoencoder ####################
'''
vae_model('./feature_extraction/VAE/'+aaenum+'/',x_train.shape[1],
x_train,x_test,intermediate_dim=1000,batch_size=20,latent_dim=50,epochs=100)
'''
'''
####### don't need to use the following lines if autoencoder require fine tuning
model = load_model('./feature_extraction/VAE/'+aaenum+'/vae_encoder'+'.h5')
x_train = model.predict(x_train)
print('X_Train Shape after VAE :', x_train.shape)
x_test = model.predict(x_test)
print('X_Test Shape after VAE :', x_test.shape)
'''
#############################################################################
##################### FINE TUNING AUTOENCODER #########################
#############################################################################
'''
y_train_binarize = label_binarize(y_train, classes=[0,1,2,3])
y_test_binarize = label_binarize(y_test, classes=[0,1,2,3])
model = load_model('./feature_extraction/deepAE/'+aaenum+'/deep_encoder'+'.h5')
model.summary()
#pull z_mean for variational autoencoder
transfer_layer=model.get_layer('encoder_mu')
aae_prev_model=Model(inputs=model.input,outputs=transfer_layer.output)
new_model=Sequential()
new_model.add(aae_prev_model)
new_model.add(Dropout(p = 0.001))
new_model.add(Dense(units = 4, activation = 'softmax', name='new_layer_added'))
def print_layer_trainable():
for layer in aae_prev_model.layers:
print("{0}:\t{1}".format(layer.trainable,layer.name))
for layer in aae_prev_model.layers:
layer.trainable=False
print_layer_trainable()
# aae_prev_model.trainable=True
# it does not work as expected
### Now It is time to Modify the Layer ###
for layer in aae_prev_model.layers:
#trainable=('encoder_mu' in layer.name)
trainable=('encoder_mu' in layer.name)
#trainable=('encoder_h2' in layer.name or 'encoder_mu' in layer.name)
layer.trainable=trainable
print_layer_trainable()
#Here we choose adadelta as optimizer
new_model.compile(optimizer = 'adadelta', loss = 'categorical_crossentropy', metrics = ['accuracy'])
history=new_model.fit(x_train, y_train_binarize, batch_size = 20, epochs = 50) # for ANN keras
score = new_model.evaluate(x_test, y_test_binarize, verbose=1, batch_size=20)
print("Test Accuracy: \n%s: %.2f%%" % (new_model.metrics_names[1], score[1]*100))
path='./feature_extraction/deepAE/fine_tuned/'+aaenum+'/'
df = pd.DataFrame(history.history)
df.to_csv(os.path.join(path, 'fine_tuned_history.csv'))
new_model.summary()
new_model.layers.pop()
#Remove the last layer
new_model.summary()
new_model.layers.pop()
#Remove the dropout layer
new_model.summary()
new_model.save(os.path.join(path,'encoder_fine_tuned.h5'))
model = load_model('./feature_extraction/deepAE/fine_tuned/'+aaenum+'/encoder_fine_tuned'+'.h5')
model.summary()
x_train = model.predict(x_train)
print('X_Train Shape after AAE :', x_train.shape)
x_test = model.predict(x_test)
print('X_Test Shape after AAE :', x_test.shape)
'''
    ######################## CLASSIFICATION ##########################
    # Fit and score every candidate classifier on this fold; each metric row
    # is appended to the global `log` frame.
    for clf in classifiers:
        clf.fit(x_train, y_train)
        name = clf.__class__.__name__
        print("="*30)
        print(name)
        print('****Results****')
        y_pred = clf.predict(x_test)
        y_pred_proba = clf.predict_proba(x_test)
        # One-hot labels are required for the multi-class ROC-AUC below.
        y_test_binarize = label_binarize(y_test, classes=[0,1,2,3])
        acc = accuracy_score(y_test, y_pred)
        print("Accuracy: {:.4%}".format(acc))
        f1=f1_score(y_test, y_pred,average='weighted')
        print("F1 Score Weighted: {:.4%}".format(f1))
        rs=recall_score(y_test, y_pred, average='weighted')
        print("Recall Score Weighted: {:.4%}".format(rs))
        ps=precision_score(y_test, y_pred, average='weighted')
        print("Precision Weighted: {:.4%}".format(ps))
        # NOTE(review): this rebinds the name `auc` (the function imported from
        # sklearn.metrics) to a float; harmless here because the imported
        # function is never called afterwards, but worth renaming.
        auc=roc_auc_score(y_test_binarize,y_pred_proba, average='macro')
        print("AUC Score: {:.4%}".format(auc))
        mcc=matthews_corrcoef(y_test, y_pred)
        print("MCC Score: {:.4%}".format(mcc))
        kappa=cohen_kappa_score(y_test, y_pred, labels=None, weights=None, sample_weight=None)
        print("Kappa: {:.4%}".format(kappa))
        ll = log_loss(y_test, y_pred_proba)
        print("Log Loss: {:.4%}".format(ll))
        log_entry = pd.DataFrame([[name, acc, f1, rs, ps, auc, mcc, kappa, ll]], columns=log_cols)
        log = log.append(log_entry)
        print("="*30)
        print (log)
################################################################################
print('###########################################')
# Average the per-fold rows so the final report has one line per classifier.
result_temp = pd.DataFrame(log)
result_final=result_temp.groupby('Classifier').mean()
result_final.to_csv("./results/result.tsv", sep='\t')
print (result_final)
print('###########################################')
print('Result Saved Successfully')
end_time = timeit.default_timer()  # NOTE(review): `start_time` is presumably set near the top of the file — confirm
###### PRINT TIME ########
print('###############')
print('Wall Clock Time')
print ((end_time - start_time), 'Sec')
# NOTE(review): `time` here is a plain float and would shadow any `time`
# module imported earlier in the file.
time=(end_time - start_time)
minutes = time // 60
time %= 60
seconds = time
print(minutes, 'Minutes', seconds,'Seconds')
######## CPU USAGE #######
print('###############')
print('CPU Usage')
print(psutil.cpu_percent(), '%')
print('THE END')
| [
"noreply@github.com"
] | asd974512.noreply@github.com |
e56d8abf68eeabd78679feae85ab12666d37e27e | 3facdefca75155161d8a1a1c7ddfaf10f3f2c6fe | /venv/Lib/site-packages/eikon/streaming_session/streamingprices.py | f143ee7e2d99a21a6897b7324556870478b6e5fa | [
"Apache-2.0"
] | permissive | suppureme/FisherEmbeddingFinal | b0b171c4757e456046224dcdcc3418889dcaccfc | 0d07f09931658c838988c987cd6d8db5376ff715 | refs/heads/master | 2023-07-06T19:47:26.755177 | 2021-08-10T06:04:47 | 2021-08-10T06:04:47 | 394,538,875 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 14,146 | py | # coding: utf8
__all__ = ["StreamingPrices"]
import sys
import logging
import asyncio
from pandas import DataFrame
from pandas import to_numeric
from .streamingprice import StreamingPrice
from .stream import StreamState
class StreamingPrices:
    """
    Open a streaming price subscription.
    Parameters
    ----------
    instruments: list[string]
        List of RICs to subscribe.
    service: string
        Specified the service to subscribe on.
        Default: None
    fields: string or list[string]
        Specified the fields to retrieve.
        Default: None
    on_refresh: callable object (streaming_prices, instrument_name, message)
        Called when a stream on instrument_name was opened successfully or when the stream is refreshed by the server.
        This callback is called with the reference to the streaming_prices object, the instrument name and the instrument full image.
        Default: None
    on_update: callable object (streaming_prices, instrument_name, message)
        Called when an update is received for a instrument_name.
        This callback is called with the reference to the streaming_prices object, the instrument name and the instrument update.
        Default: None
    on_status: callable object (streaming_prices, instrument_name, status)
        Called when a status is received for a instrument_name.
        This callback is called with the reference to the streaming_prices object, the instrument name and the instrument status.
        Default: None
    on_complete: callable object (streaming_prices, instrument_name)
        Called when all subscriptions are completed.
        This callback is called with the reference to the streaming_prices object.
        Default: None
    Raises
    ------
    Exception
        If request fails.
    Examples
    --------
    >> import eikon as ek
    >> fx = ek.StreamingPrices(['EUR=', 'GBP='])
    >> fx.open()
    >> bid_eur = fx['EUR']['BID']
    >> ask_eur = fx['EUR']['ASK']
    >>
    >> def on_update(streams, instrument, msg):
    ...         print(msg)
    >> subscription = ek.StreamingPrices(['VOD.L', 'EUR=', 'PEUP.PA', 'IBM.N'],
    ...                                  ['DSPLY_NAME', 'BID', 'ASK'],
    ...                                  on_update=on_update)
    >> subscription.open()
    {"EUR=":{"DSPLY_NAME":"RBS LON","BID":1.1221,"ASK":1.1224}}
    {"PEUP.PA":{"DSPLY_NAME":"PEUGEOT","BID":15.145,"ASK":15.155}}
    {"IBM.N":{"DSPLY_NAME":"INTL BUS MACHINE","BID":"","ASK":""}}
    ...
    """
    class Params(object):
        """Immutable holder for the subscribed universe and field list."""
        def __init__(self, instruments, fields):
            self._universe = instruments
            self._fields = fields

        @property
        def instruments(self):
            return self._universe

        @property
        def fields(self):
            return self._fields

    class StreamingPricesIterator:
        """ StreamingPrices Iterator class """
        def __init__(self, streaming_prices):
            self._streaming_prices = streaming_prices
            self._index = 0

        def __next__(self):
            """ Return the next streaming item from streaming price list """
            if self._index < len(self._streaming_prices.params.instruments):
                result = self._streaming_prices[self._streaming_prices.params.instruments[self._index]]
                self._index += 1
                return result
            raise StopIteration()

    def __init__(self,
                 instruments,
                 session=None,
                 fields=None,
                 on_refresh=None,
                 on_status=None,
                 on_update=None,
                 on_complete=None,
                 service=None):
        from eikon.Profile import get_desktop_session
        # Fix: EikonError was raised below without being imported in this
        # scope, which turned a bad-argument error into a NameError.
        from eikon.eikonError import EikonError
        if session is None:
            self._session = get_desktop_session()
        else:
            self._session = session
        if isinstance(instruments, str):
            instruments = [instruments]
        elif isinstance(instruments, list) and all(isinstance(item, str) for item in instruments):
            pass
        else:
            raise EikonError(-1, "StreamingPrices: instruments must be a list of strings")
        # Fix: the default used to be a shared mutable `[]`; normalize a
        # missing value to a fresh list instead (same observable behavior).
        if fields is None:
            fields = []
        self._fields = fields
        self.params = StreamingPrices.Params(instruments=instruments, fields=fields)
        self._service = service
        # One StreamingPrice stream per RIC, all wired to our internal
        # callbacks so this object can aggregate refresh/update/status events.
        self._streaming_prices = {}
        for name in instruments:
            self._streaming_prices[name] = StreamingPrice(session=self._session,
                                                          name=name,
                                                          fields=self._fields,
                                                          service=self._service,
                                                          on_refresh=self._on_refresh,
                                                          on_update=self._on_update,
                                                          on_status=self._on_status,
                                                          on_complete=self._on_complete
                                                          )
        self._on_refresh_cb = on_refresh
        self._on_status_cb = on_status
        self._on_update_cb = on_update
        self._on_complete_cb = on_complete
        self._state = StreamState.Closed
        # set universe of on_complete: names whose subscription has completed
        self._on_complete_set = None

    @property
    def state(self):
        return self._state

    ###################################################
    #  Access to StreamingPrices as a dict            #
    ###################################################
    def keys(self):
        if self._streaming_prices:
            return self._streaming_prices.keys()
        return {}.keys()

    def values(self):
        if self._streaming_prices:
            return self._streaming_prices.values()
        return {}.values()

    def items(self):
        if self._streaming_prices:
            return self._streaming_prices.items()
        return {}.items()

    ###################################################
    #  Make StreamingPrices iterable                  #
    ###################################################
    def __iter__(self):
        return StreamingPrices.StreamingPricesIterator(self)

    def __getitem__(self, item):
        if item in self.params.instruments:
            return self._streaming_prices[item]
        else:
            raise KeyError(f"{item} not in StreamingPrices universe")

    def __len__(self):
        return len(self.params.instruments)

    ###################################################
    #  methods to open synchronously item stream      #
    ###################################################
    def open(self, with_updates=True):
        """
        Open synchronously the streaming price
        """
        return self._session._loop.run_until_complete(self.open_async(with_updates=with_updates))

    ################################################
    #  methods to open asynchronously item stream  #
    ################################################
    async def open_async(self, with_updates=True):
        """
        Open asynchronously the streaming price
        """
        self._session.log(1, f'StreamingPrices : open streaming on {self.params.instruments}')
        if self._state == StreamState.Open:
            # Fix: used to return None when already open; returning the state
            # makes open()/open_async() idempotent for callers.
            return self._state
        self._state = StreamState.Pending
        self._on_complete_set = set()
        task_list = [stream.open_async(with_updates=with_updates) for stream in self._streaming_prices.values()]
        await asyncio.wait(task_list, return_when=asyncio.ALL_COMPLETED)
        self._state = StreamState.Open
        self._session.log(1, f'StreamingPrices : start asynchrously streaming on {self.params.instruments} done')
        return self._state

    def close(self):
        """Close every underlying stream and mark this object Closed."""
        if self._state is not StreamState.Closed:
            self._session.log(1, f'StreamingPrices : close streaming on {self.params.instruments}')
            for stream in self._streaming_prices.values():
                stream.close()
            self._state = StreamState.Closed
        return self._state

    def get_snapshot(self, instruments=None, fields=None, convert=True):
        """
        Returns a Dataframe filled with snapshot values for a list of instrument names and a list of fields.
        Parameters
        ----------
        instruments: list of strings
            List of instruments to request snapshot data on.
        fields: list of strings
            List of fields to request.
        convert: boolean
            If True, force numeric conversion for all values.
        Returns
        -------
        pandas.DataFrame
            pandas.DataFrame content:
                - columns : instrument and fieled names
                - rows : instrument name and field values
        Raises
        ------
        Exception
            If request fails or if server returns an error
        ValueError
            If a parameter type or value is wrong
        Examples
        --------
        >>> import eikon as ek
        >>> ek.set_app_key('set your app key here')
        >>> streaming_prices = ek.StreamingPrices(instruments=["MSFT.O", "GOOG.O", "IBM.N"], fields=["BID", "ASK", "OPEN_PRC"])
        >>> data = streaming_prices.get_snapshot(["MSFT.O", "GOOG.O"], ["BID", "ASK"])
        >>> data
              Instrument      BID      ASK
        0         MSFT.O 150.9000 150.9500
        1         GOOG.O 1323.9000 1327.7900
        2         IBM.N       NaN      NaN
        """
        from eikon.eikonError import EikonError
        if instruments:
            for name in instruments:
                if name not in self.params.instruments:
                    # Fix: was `ElektronError`, an undefined name, which masked
                    # the intended error with a NameError.
                    raise EikonError(-1, f'Instrument {name} was not requested : {self.params.instruments}')
        if fields:
            for field in fields:
                if field not in self.params.fields:
                    raise EikonError(-1, f'Field {field} was not requested : {self.params.fields}')
        _universe = instruments if instruments else self.params.instruments
        _all_fields_value = {name: self._streaming_prices[name].get_fields(fields)
                             if name in self._streaming_prices else None
                             for name in _universe}
        # When no explicit field list is given, collect the union of fields
        # seen across all instruments, preserving first-seen order.
        _fields = []
        if not fields:
            fields = []
            for field_values in _all_fields_value.values():
                if field_values:
                    _fields.extend(field for field in field_values.keys() if field not in _fields)
        else:
            _fields = fields
        # NOTE(review): any falsy field value (e.g. '' for a missing price, or
        # a literal 0) is coerced to None here and becomes NaN after numeric
        # conversion — confirm that 0 values can never be legitimate before
        # tightening this to an explicit None check.
        _df_source = {f: [_all_fields_value[name][f] if _all_fields_value[name].get(f) else None
                          for name in _universe] for f in _fields}
        _price_dataframe = DataFrame(_df_source, columns=_fields)
        if convert:
            _price_dataframe = _price_dataframe.apply(to_numeric, errors='ignore')
        _price_dataframe.insert(0, 'Instrument', _universe)
        if convert and _df_source:
            _price_dataframe = _price_dataframe.convert_dtypes()
        return _price_dataframe

    #########################################
    # Messages from stream_cache connection #
    #########################################
    def _on_refresh(self, stream, message):
        """Forward a per-instrument refresh to the user callback (thread-safe)."""
        if self._on_refresh_cb:
            try:
                self._session.log(1, 'StreamingPrices : call on_refresh callback')
                self._state = StreamState.Open
                self._session._loop.call_soon_threadsafe(self._on_refresh_cb, self, stream.name, message)
                # self._on_refresh_cb(self, name, message)
            except Exception as e:
                self._session.log(logging.ERROR, f'StreamingPrices on_refresh callback raised exception: {e!r}')
                self._session.log(1, f'Traceback : {sys.exc_info()[2]}')

    def _on_status(self, stream, status):
        """Forward a per-instrument status to the user callback (thread-safe)."""
        if self._on_status_cb:
            try:
                self._session.log(1, 'StreamingPrices : call on_status callback')
                self._session._loop.call_soon_threadsafe(self._on_status_cb, self, stream.name, status)
            except Exception as e:
                self._session.log(logging.ERROR, f'StreamingPrices on_status callback raised exception: {e!r}')
                self._session.log(1, f'Traceback : {sys.exc_info()[2]}')
        # check for closed stream when status "Closed", "ClosedRecover", "NonStreaming" or "Redirect"
        if stream.state == StreamState.Closed and stream.name not in self._on_complete_set:
            # this stream has been closed, so it means completed also
            self._on_complete(stream)

    def _on_update(self, stream, update):
        """Forward a per-instrument update to the user callback (thread-safe)."""
        if self._on_update_cb:
            try:
                self._session.log(1, 'StreamingPrices : call on_update callback')
                self._session._loop.call_soon_threadsafe(self._on_update_cb, self, stream.name, update)
            except Exception as e:
                self._session.log(logging.ERROR, f'StreamingPrices on_update callback raised exception: {e!r}')
                self._session.log(1, f'Traceback : {sys.exc_info()[2]}')

    def _on_complete(self, stream):
        """Record completion of one stream; fire the user callback once all are done."""
        assert self._on_complete_set is not None
        # check for update completed set
        if stream.name not in self._on_complete_set:
            # update the stream to be in complete list
            self._on_complete_set.update([stream.name, ])
            # check for complete for all subscribe universe
            if self._on_complete_set == set(self.params.instruments):
                if self._on_complete_cb:
                    try:
                        self._session.log(1, 'StreamingPrices : call on_complete callback')
                        self._session._loop.call_soon_threadsafe(self._on_complete_cb, self)
                    except Exception as e:
                        self._session.log(logging.ERROR, f'StreamingPrices on_complete callback raised exception: {e!r}')
                        self._session.log(1, f'Traceback : {sys.exc_info()[2]}')
"asahu99@gmail.com"
] | asahu99@gmail.com |
acbceabe2af58b797b1e56d056e10142feda7758 | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/googlecloudsdk/third_party/appengine/googlecron/__init__.py | c16c11b78e2b1864918de3481da96215d94c1f8f | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 640 | py | # Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"this file is needed to make this a package"
| [
"jonathang132298@gmail.com"
] | jonathang132298@gmail.com |
3efc22db73e35b507bc67c250be3584146d5c56e | 9d041cdca12fa685261bbd3d3efc1cb77ee3b701 | /DemConverter/dem_converter.py | 1e2e7a7bfa5cdf85eac28241df83672d04a96125 | [] | no_license | sagost/DemConverter | 45c3c5f3ea7ea9abe558c1d90cd2ba9088e7da6e | 99b193d669d45620e524d5eb3f33b19a3f1bb33d | refs/heads/master | 2021-05-14T12:56:46.303643 | 2018-01-05T20:20:19 | 2018-01-05T20:20:19 | 116,423,295 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,883 | py | # -*- coding: utf-8 -*-
"""
/***************************************************************************
DemConverter
A QGIS plugin
Convert DEM file
-------------------
begin : 2017-01-23
git sha : $Format:%H$
copyright : (C) 2017 by Salvatore Agosta
email : sagost@katamail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import QSettings, QTranslator, qVersion, QCoreApplication, Qt
from PyQt4.QtGui import QAction, QIcon
# Initialize Qt resources from file resources.py
import resources
# Import the code for the DockWidget
from dem_converter_dockwidget import DemConverterDockWidget
import os.path
class DemConverter:
    """QGIS Plugin Implementation."""
    def __init__(self, iface):
        """Constructor.
        :param iface: An interface instance that will be passed to this class
            which provides the hook by which you can manipulate the QGIS
            application at run time.
        :type iface: QgsInterface
        """
        # Save reference to the QGIS interface
        self.iface = iface
        # initialize plugin directory
        self.plugin_dir = os.path.dirname(__file__)
        # initialize locale: pick the translation file matching the user's language
        locale = QSettings().value('locale/userLocale')[0:2]
        locale_path = os.path.join(
            self.plugin_dir,
            'i18n',
            'DemConverter_{}.qm'.format(locale))
        if os.path.exists(locale_path):
            self.translator = QTranslator()
            self.translator.load(locale_path)
            if qVersion() > '4.3.3':
                QCoreApplication.installTranslator(self.translator)
        # Declare instance attributes
        self.actions = []
        self.menu = self.tr(u'&Dem Converter')
        # TODO: We are going to let the user set this up in a future iteration
        self.toolbar = self.iface.addToolBar(u'DemConverter')
        self.toolbar.setObjectName(u'DemConverter')
        #print "** INITIALIZING DemConverter"
        # pluginIsActive / dockwidget track whether the dock panel is shown
        self.pluginIsActive = False
        self.dockwidget = None
    # noinspection PyMethodMayBeStatic
    def tr(self, message):
        """Get the translation for a string using Qt translation API.
        We implement this ourselves since we do not inherit QObject.
        :param message: String for translation.
        :type message: str, QString
        :returns: Translated version of message.
        :rtype: QString
        """
        # noinspection PyTypeChecker,PyArgumentList,PyCallByClass
        return QCoreApplication.translate('DemConverter', message)
    def add_action(
        self,
        icon_path,
        text,
        callback,
        enabled_flag=True,
        add_to_menu=True,
        add_to_toolbar=True,
        status_tip=None,
        whats_this=None,
        parent=None):
        """Add a toolbar icon to the toolbar.
        :param icon_path: Path to the icon for this action. Can be a resource
            path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
        :type icon_path: str
        :param text: Text that should be shown in menu items for this action.
        :type text: str
        :param callback: Function to be called when the action is triggered.
        :type callback: function
        :param enabled_flag: A flag indicating if the action should be enabled
            by default. Defaults to True.
        :type enabled_flag: bool
        :param add_to_menu: Flag indicating whether the action should also
            be added to the menu. Defaults to True.
        :type add_to_menu: bool
        :param add_to_toolbar: Flag indicating whether the action should also
            be added to the toolbar. Defaults to True.
        :type add_to_toolbar: bool
        :param status_tip: Optional text to show in a popup when mouse pointer
            hovers over the action.
        :type status_tip: str
        :param parent: Parent widget for the new action. Defaults None.
        :type parent: QWidget
        :param whats_this: Optional text to show in the status bar when the
            mouse pointer hovers over the action.
        :returns: The action that was created. Note that the action is also
            added to self.actions list.
        :rtype: QAction
        """
        icon = QIcon(icon_path)
        action = QAction(icon, text, parent)
        action.triggered.connect(callback)
        action.setEnabled(enabled_flag)
        if status_tip is not None:
            action.setStatusTip(status_tip)
        if whats_this is not None:
            action.setWhatsThis(whats_this)
        if add_to_toolbar:
            self.toolbar.addAction(action)
        if add_to_menu:
            # Entry lives under the Raster menu (this is a raster plugin)
            self.iface.addPluginToRasterMenu(
                self.menu,
                action)
        self.actions.append(action)
        return action
    def initGui(self):
        """Create the menu entries and toolbar icons inside the QGIS GUI."""
        icon_path = ':/plugins/DemConverter/icon.png'
        self.add_action(
            icon_path,
            text=self.tr(u'Convert DEM files'),
            callback=self.run,
            parent=self.iface.mainWindow())
    #--------------------------------------------------------------------------
    def onClosePlugin(self):
        """Cleanup necessary items here when plugin dockwidget is closed"""
        #print "** CLOSING DemConverter"
        # disconnects
        self.dockwidget.closingPlugin.disconnect(self.onClosePlugin)
        # remove this statement if dockwidget is to remain
        # for reuse if plugin is reopened
        # Commented next statement since it causes QGIS crashe
        # when closing the docked window:
        # self.dockwidget = None
        self.pluginIsActive = False
    def unload(self):
        """Removes the plugin menu item and icon from QGIS GUI."""
        #print "** UNLOAD DemConverter"
        for action in self.actions:
            self.iface.removePluginRasterMenu(
                self.tr(u'&Dem Converter'),
                action)
            self.iface.removeToolBarIcon(action)
        # remove the toolbar
        del self.toolbar
    #--------------------------------------------------------------------------
    def run(self):
        """Run method that loads and starts the plugin"""
        if not self.pluginIsActive:
            self.pluginIsActive = True
            #print "** STARTING DemConverter"
            # dockwidget may not exist if:
            #    first run of plugin
            #    removed on close (see self.onClosePlugin method)
            if self.dockwidget == None:
                # Create the dockwidget (after translation) and keep reference
                self.dockwidget = DemConverterDockWidget(self.iface)
            # connect to provide cleanup on closing of dockwidget
            self.dockwidget.closingPlugin.connect(self.onClosePlugin)
            # show the dockwidget
            # TODO: fix to allow choice of dock location
            self.iface.addDockWidget(Qt.LeftDockWidgetArea, self.dockwidget)
            self.dockwidget.show()
| [
"noreply@github.com"
] | sagost.noreply@github.com |
2904e483645aab3aad4727f04b8cb19ab9e1ab65 | f7a1da15ba4941b5c7f13603455bf7e3c25b568e | /ggplot/tests/test_legend.py | a72d8475c032db2cb9c839b2d976b70db432c191 | [
"BSD-2-Clause"
] | permissive | ellisonbg/ggplot | 64b93f172ed729366cda12a1878733d3fc899cb9 | d9028b89c8ae81845b4653deccef897f7ecc8cb8 | refs/heads/master | 2020-05-29T11:57:16.338751 | 2014-05-02T18:14:37 | 2014-05-02T18:14:37 | 19,389,450 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,326 | py | from __future__ import (absolute_import, division, print_function,
unicode_literals)
from . import get_assert_same_ggplot, cleanup, assert_same_elements
assert_same_ggplot = get_assert_same_ggplot(__file__)
from nose.tools import (assert_true, assert_raises, assert_is,
assert_is_not, assert_equal)
from ggplot import *
import six
import pandas as pd
from ggplot.components import assign_visual_mapping
def test_legend_structure():
df = pd.DataFrame({
'xmin': [1, 3, 5],
'xmax': [2, 3.5, 7],
'ymin': [1, 4, 6],
'ymax': [5, 5, 9],
'fill': ['blue', 'red', 'green'],
'quality': ['good', 'bad', 'ugly'],
'alpha': [0.1, 0.5, 0.9],
'texture': ['hard', 'soft', 'medium']})
gg = ggplot(df, aes(xmin='xmin', xmax='xmax', ymin='ymin', ymax='ymax',
colour='quality', fill='fill', alpha='alpha',
linetype='texture'))
new_df, legend = assign_visual_mapping(df, gg.aesthetics, gg)
# All mapped aesthetics must have an entry in the legend
for aesthetic in ('color', 'fill', 'alpha', 'linetype'):
assert(aesthetic in legend)
# None of the unassigned aesthetic should have an entry in the legend
assert('size' not in legend)
assert('shape' not in legend)
# legend entries should remember the column names
# to which they were mapped
assert(legend['fill']['column_name'] == 'fill')
assert(legend['color']['column_name'] == 'quality')
assert(legend['linetype']['column_name'] == 'texture')
assert(legend['alpha']['column_name'] == 'alpha')
# Discrete columns for non-numeric data
assert(legend['fill']['scale_type'] == 'discrete')
assert(legend['color']['scale_type'] == 'discrete')
assert(legend['linetype']['scale_type'] == 'discrete')
assert(legend['alpha']['scale_type'] == 'continuous')
# Alternate
df2 = pd.DataFrame.copy(df)
df2['fill'] = [90, 3.2, 8.1]
gg = ggplot(df2, aes(xmin='xmin', xmax='xmax', ymin='ymin', ymax='ymax',
colour='quality', fill='fill', alpha='alpha',
linetype='texture'))
new_df, legend = assign_visual_mapping(df2, gg.aesthetics, gg)
assert(legend['fill']['scale_type'] == 'continuous')
| [
"has2k1@gmail.com"
] | has2k1@gmail.com |
6b3660695db6f5815ec10bfa006908c75ed584b3 | ae4316d99cbd4d5f8fe26b5e905c2a14fd3a3575 | /lexer.py | ecfd89bed7fea867173f0d57020ca001548ea430 | [] | no_license | tanmaytirpankar/ForLoopParser | 229fbca10780e84d46faaed97c17d43e1ec1b2de | 20a365af04382a2e3de518d9a284e8b4fc284d24 | refs/heads/main | 2022-12-30T14:14:44.310651 | 2020-10-21T20:29:39 | 2020-10-21T20:29:39 | 305,006,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,938 | py | from sly import Lexer
from gtokens import *
class Flexer(Lexer):
# The tokens arranged on each line are type of input, arithmetic operators, logical operators, miscellaneous symbols,
# Grammar specific keywords
tokens = {INTEGER, DECIMAL,
PLUS, MINUS, DIV, MUL, MOD, INC, DEC, \
LT, GT, LET, GET, EQ, NEQ, AND, OR,
LPAREN, RPAREN, CLPAREN, CRPAREN, SLPAREN, SRPAREN, SEMICOLON, COMMA, ASSIGN, \
ID, FOR, BEGIN, END, DO, ENDFOR}
# String characters to be ignored
ignore = ' \t'
ignore_comment = r'\#.*'
# Regular expression rules for tokens
ID = r'[a-zA-Z_][a-zA-Z0-9_]*'
DECIMAL = r'\d+.\d+'
INTEGER = r'\d+'
DIV = r'/'
MUL = r'\*'
INC = r'\+\+'
DEC = r'--'
PLUS = r'\+'
MINUS = r'-'
MOD = r'\%'
EQ = r'=='
ASSIGN = r'='
LT = r'<'
GT = r'>'
LET = r'<='
GET = r'>='
NEQ = r'\!='
AND = r'&&'
OR = r'\|\|'
LPAREN = r'\('
RPAREN = r'\)'
CLPAREN = r'\{'
CRPAREN = r'\}'
SLPAREN = r'\['
SRPAREN = r'\]'
SEMICOLON = r';'
COMMA = r','
ID['for'] = FOR
ID['begin'] = BEGIN
ID['end'] = END
ID['do'] = DO
ID['endfor'] = ENDFOR
pos = 0
token_list = []
current_token = None
tok = None
# Define a rule so we can track line numbers
@_(r'\n+')
def ignore_newline(self, t):
self.lineno += len(t.value)
# Compute column.
# input is the input text string
# token is a token instance
def find_column(self, text, token):
last_cr = text.rfind('\n', 0, token.index)
if last_cr < 0:
last_cr = 0
column = (token.index - last_cr) + 1
return column
def error(self, t):
print('Line %d: Bad character %r' % (self.lineno, t.value[0]))
def create_token_generator(self, text):
self.tok = self.tokenize(text)
def get_current_token(self):
return self.current_token
def get_next_token(self):
try:
return self.tok.__next__()
except StopIteration:
return None
@_(r'\d+')
def INTEGER(self, tok):
tok.value = int(tok.value)
return tok
@_(r'\d+.\d+')
def DECIMAL(self, tok):
tok.value = float(tok.value)
return tok
def show_token(self, tok):
print('type=%r, value=%r' % (tok.type, tok.value))
if __name__ == '__main__':
import sys
text=open(sys.argv[1], 'r').read()
lexer = Flexer()
#for tok in lexer.tokenize(text):
# print('type=%r, value=%r' % (tok.type, tok.value))
"""
program : BEGIN <stmts> END
stmts : <stmt> <stmts>*
stmt : <assign_expr>
| <forblock>
| empty
forblock: FOR LPAREN <loop_assign> (COMMA <loop_assign>)* SEMICOLON <cond_expr> SEMICOLON (<op_expr> | <loop_assign>) (COMMA (<op_expr> | <loop_assign>))* RPAREN DO <stmts> ENDFOR
loop_assign: ID ASSIGN <loop_expr>
loop_expr : <loop_term> ((PLUS | MINUS) <loop_term>)*
loop_term : <loop_factor> ((MUL | DIV | MOD) <loop_factor>)*
loop_factor : MINUS <loop_factor>
| PLUS <loop_factor>
| LPAREN <loop_expr> RPAREN
| INTEGER
| ID
cond_expr : <cond_term> ( (AND | OR) <cond_term>)*
cond_term : <sym_expr> (LT | GT | LET | GET | EQ | NEQ) <sym_expr>
sym_expr : <sym_term> ((PLUS | MINUS) <sym_term>)*
sym_term : <sym_factor> ((MUL | DIV | MOD) <sym_factor>)*
sym_factor : MINUS <sym_factor>
| PLUS <sym_factor>
| LPAREN <sym_expr> RPAREN
| INTEGER
| ID
assign_expr : ID ASSIGN <expr> SEMICOLON
op_expr : ID (INC | DEC)
expr : <term> ((PLUS | MINUS) <term>)*
term : <factor> ((MUL | DIV | MOD) <factor>)*
factor : MINUS <factor>
| PLUS <factor>
| LPAREN <expr> RPAREN
| INTEGER
| ID
"""
| [
"tirpankartanmay@gmail.com"
] | tirpankartanmay@gmail.com |
e4bcdf2e5a6ee879997a68875791a84f8e83bf15 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part009770.py | 6b7721e39926572acd750c7dcc8d9bfd53756e66 | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,931 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher20347(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({0: 1}), [
(VariableWithCount('i3.1.2.2.2.0', 1, 1, S(0)), Add)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Add
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher20347._instance is None:
CommutativeMatcher20347._instance = CommutativeMatcher20347()
return CommutativeMatcher20347._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 20346
subst1 = Substitution(subst0)
try:
subst1.try_add_variable('i3.1.2.2.2.1.0_1', S(1))
except ValueError:
pass
else:
pass
# State 20348
if len(subjects) >= 1:
tmp2 = subjects.popleft()
subst2 = Substitution(subst1)
try:
subst2.try_add_variable('i3.1.2.2.2.1.0', tmp2)
except ValueError:
pass
else:
pass
# State 20349
if len(subjects) == 0:
pass
# 0: x*f
yield 0, subst2
subjects.appendleft(tmp2)
if len(subjects) >= 1 and isinstance(subjects[0], Mul):
tmp4 = subjects.popleft()
associative1 = tmp4
associative_type1 = type(tmp4)
subjects5 = deque(tmp4._args)
matcher = CommutativeMatcher20351.get()
tmp6 = subjects5
subjects5 = []
for s in tmp6:
matcher.add_subject(s)
for pattern_index, subst1 in matcher.match(tmp6, subst0):
pass
if pattern_index == 0:
pass
# State 20352
if len(subjects) == 0:
pass
# 0: x*f
yield 0, subst1
subjects.appendleft(tmp4)
return
yield
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy.utils import VariableWithCount
from collections import deque
from .generated_part009771 import *
from multiset import Multiset | [
"franz.bonazzi@gmail.com"
] | franz.bonazzi@gmail.com |
3bfe9478a45b3ce6a71d86c7bffb8ca57afba743 | ec1fede858622eb8f18606e562324bf40aab8768 | /fibo.py | d7f55991d92d311b99c9fa5e18da436f365b1201 | [] | no_license | IgorBambach/ac4 | 4c501e21c08103ca87eea53987672aeed5b2cb87 | ecc9a3029ba9ed561761b993303bba247fa10004 | refs/heads/main | 2023-01-03T08:08:04.878007 | 2020-10-23T00:31:59 | 2020-10-23T00:31:59 | 306,482,128 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | importar os
from flask import Flask , jsonify , request
de matemática import sqrt
app = Flask ( __name__ )
@ app . rota ( '/' )
def nao_entre_em_panico ():
proximo = 1
anterior = 0
limite = 98
encontrado = 0
resposta = "1, \ n "
while ( encontrado < limite ):
tmp = proximo
proximo = proximo + anterior
anterior = tmp
encontrado = encontrado + 1
resposta + = str ( proximo ) + ", \ n "
resposta de retorno
if _name_ == "__main__" :
porta = int ( os . amb . get ( "PORT" , 5000 ))
app . executar ( host = '0.0.0.0' , porta = porta )
| [
"noreply@github.com"
] | IgorBambach.noreply@github.com |
3f373e12c77e03b27c6309737dbc8c7e84b3db7d | 5d132708762372e770954532ad8e8f38f3596122 | /schoolsite/backs/backs/migrations/0013_auto_20210118_1350.py | 85a1fa3e48f501dd64da183c9376723eac589e6f | [] | no_license | ether2314/school-back | 95a8a1dac2130930a3b9c8fd2163ce983d0a6ff7 | c9a31a2fda979feaa1e190fcbb2d0538f6604518 | refs/heads/main | 2023-07-04T13:31:15.107774 | 2021-07-29T15:59:12 | 2021-07-29T15:59:12 | 390,728,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | # Generated by Django 3.0.8 on 2021-01-18 12:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('backs', '0012_auto_20210118_1343'),
]
operations = [
migrations.RemoveField(
model_name='studclass',
name='Year',
),
migrations.AlterField(
model_name='studclass',
name='Id',
field=models.CharField(choices=[('100lvl', '100lvl'), ('200lvl', '200lvl'), ('300lvl', '300lvl'), ('400lvl', '400lvl'), ('500lvl', '500lvl')], max_length=6, primary_key='True', serialize=False),
),
]
| [
"onyii2313@gmail.com"
] | onyii2313@gmail.com |
335927b71241544311639bc055c64afdadfc1656 | 5ea7394e606d21a5a5bf84e657b33c580c6d0ec7 | /tablas/admin.py | 2a46cf4520366548238bba410a5e0f445f4c50c5 | [] | no_license | sangenispablo/erp.colegio | 0738e5e33bafecd6548e598147716e824954a14b | 31544e9016e0c8396d1fb63ff9cfc3b6ab6e9bf8 | refs/heads/master | 2023-04-21T19:40:19.538209 | 2021-05-07T17:06:22 | 2021-05-07T17:06:22 | 361,764,986 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | from django.contrib import admin
from .models import Nivel, Turno, Aula
admin.site.register(Nivel)
admin.site.register(Turno)
admin.site.register(Aula)
| [
"sangenispablo@gmail.com"
] | sangenispablo@gmail.com |
3eb9faa27601591cf0d6b31b28370c3d97589540 | 61d08e23fbb62e16f7bd9d43673b1cf4e0558c37 | /miraPipeline/pipeline/preflight/preflight_libs/get_context.py | cc6dbb2fd318693a80edb4f861ef0a9019199413 | [] | no_license | jonntd/mira | 1a4b1f17a71cfefd20c96e0384af2d1fdff813e8 | 270f55ef5d4fecca7368887f489310f5e5094a92 | refs/heads/master | 2021-08-31T12:08:14.795480 | 2017-12-21T08:02:06 | 2017-12-21T08:02:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 495 | py | # -*- coding: utf-8 -*-
import get_file_name
import get_engine
def get_context():
try:
from miraLibs.pipeLibs import pipeFile
scene_name = get_file_name.get_file_name()
x = pipeFile.PathDetails.parse_path(scene_name)
return x.step
except:
engine = get_engine.get_engine()
if engine == "maya":
return "MidMdl"
elif engine == "nuke":
return "Comp"
elif engine == "houdini":
return "Vfx"
| [
"276575758@qq.com"
] | 276575758@qq.com |
6c670e880143af3d3df7f3fa48cd73def4f4535b | 0ee88932af5b6ed088e471abcbd5f40fd9cbd688 | /Other/eraser.py | 4011853bf7baa80b3ee2c2398547b2997ebdd682 | [] | no_license | BjaouiAya/Cours-Python | 48c740966f9814e1045035ffb902d14783d36194 | 14b306447e227ddc5cb04b8819f388ca9f91a1d6 | refs/heads/master | 2021-06-10T22:17:38.731030 | 2016-11-11T16:45:05 | 2016-11-11T16:45:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,965 | py | #! /usr/bin/env python
# -*- coding:Utf8 -*-
"""Renaming class constructor"""
########################################
#### Classes and Methods imported : ####
########################################
import os
import re
#####################
#### Constants : ####
#####################
# Regex and folder parmeter for music file before burning
REGEX_MP3 = re.compile("\A[0-9]{2}\. " "|\A[0-9]{2} \- " "|\A[0-9]{2}[ \-]")
FOLDER_MP3 = "/home/pampi/Output/cd_test/"
#######################################
#### Classes, Methods, Functions : ####
#######################################
class RenameMe:
"""
In all files inside a directory (self.path) delete a part of the name
according to regex and rename old file.
To check another folder you only have to set self.path to new directory.
Can be used to remove numbered songs like "10 song_nb.mp3".
"""
def __init__(self, path="", regex=REGEX_MP3):
self.path = path
self.regex = regex
def change_regex(self, source, regex_expr=r'', replacement="", mode="rb"):
"""
Change file name according to regex replacement and path variable
"""
with open(source, mode) as f:
old = f.name[len(self.path):]
new = re.sub(self.regex, replacement, old)
os.rename(f.name, self.path+new)
if old is not new:
print(old, "------->", new)
else:
print(old, " didn't change")
def regex_loop(self):
"""
Check all elements inside self.path directory and call
change if element is a file
"""
for mp3 in os.listdir(self.path):
if os.path.isfile(self.path+mp3):
self.change_regex(self.path+mp3)
########################
#### Main Program : ####
########################
if __name__ == '__main__':
cd_dir = RenameMe(FOLDER_MP3)
cd_dir.regex_loop()
| [
"jeremybois@rocketmail.com"
] | jeremybois@rocketmail.com |
e2b9395ca306349905fc72c9f94e4caaa1cbe988 | 5d7684b486a23cb8177dbaa29b7540dc4c63a8f8 | /src/ds_project/urls.py | 242dd938317e4015238deefac20a32ded666f974 | [] | no_license | VitalR/DataAnalysis-App | 8cce543a1981e6dfb3b5204e01700a1349466252 | 32785b3d2d0e5e8042b4d34ca9c72c32c77b0816 | refs/heads/master | 2023-01-03T15:40:42.240473 | 2020-10-31T20:22:18 | 2020-10-31T20:22:18 | 304,565,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,547 | py | """ds_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from .views import home_view, login_view, register_view
from django.contrib.auth import views as auth_views
urlpatterns = [
path('admin/', admin.site.urls),
path('', home_view, name='home'),
path('login/', login_view, name='login'),
path('register/', register_view, name='register'),
path('performance/', include('products.urls', namespace='products')),
path('upload/', include('csvs.urls', namespace='csvs')),
path('customers/', include('customers.urls', namespace='customers')),
path('logout/', auth_views.LogoutView.as_view(template_name='logout.html'), name='logout'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"vitalii.rodikov@gmail.com"
] | vitalii.rodikov@gmail.com |
dc0e963aa23abe50e37b51a150717f3e95b98ee4 | e627d47d5102bd68c2012501aa120833b9271da7 | /aws_api/core/admin.py | deadee44fdc7c2eff24954c469f2c470d31764f1 | [] | no_license | aayushgupta97/django-km | 5ba275d1f85eaaf8bc052e47d2b6b6f1a5e4cf90 | d34cd4f8637718044832d9baeecee86df5e821a5 | refs/heads/master | 2023-01-02T18:12:31.384634 | 2020-10-24T09:21:50 | 2020-10-24T09:21:50 | 298,391,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | from django.contrib import admin
from .models import AWSCredentials
# Register your models here.
admin.site.register(AWSCredentials) | [
"aayushgupta2097@gmail.com"
] | aayushgupta2097@gmail.com |
a5d02be7324103df8b24f058e3b8de41af441989 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02768/s686694566.py | 553aafe96bef5565407dfea61c0ba091a9ef4718 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | n, a, b = list(map(int, input().split(' ')))
# 二項係数 mod [検索]
mmm = 1000000000 + 7
fac = []
inv = []
inv_fac = []
def init(n):
fac.append(1)
fac.append(1)
inv.append(0)
inv.append(1)
inv_fac.append(1)
inv_fac.append(1)
for i in range(2, n):
fac.append(fac[-1] * i % mmm)
inv.append(mmm - inv[mmm%i] * (mmm // i) % mmm)
inv_fac.append(inv_fac[-1] * inv[-1] % mmm)
def choice(a, b):
if a < b:
return 0
v = 1
for i in range(b):
v = (v * (a-i)) % mmm # 偶然通っていたけどここはnではなくa (eの途中で気づいた)
return v * inv_fac[b]
init(int(2e5) + 1)
ans = pow(2, n, mmm) - 1 # v, e, mod
bunshi = 1
for i in range(a):
bunshi = (bunshi * (n-i)) % mmm
ans -= choice(n, a)
ans -= choice(n, b)
print(ans % mmm)
'''
4, 1, 3 => 4c2 + 4c4 -> 6+1 = 7
4 + 6 + 4 + 1 - 4c1 - 4c2
1 1
11 2
121 4
1331 8
14641 16, 0が無いので-1, 大きい combination -> 二項係数 mod [検索]
'''
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
ef7fa4042be75c55cebe7e526c05a66ca9081831 | 49d67f9b1abec8ddacd30b9a7ff99e036c1d2dfc | /App/Json_Class/TCPdevice_dto.py | 200fe093346990dd623926036a4591ecaf07c70d | [] | no_license | jacobpaul07/Project-BoschMCM-API | 3b9f25011f24d47089df3185e45e810d0f4b327f | 2cb6962ff825919c3f2d054333dcb7cce275379a | refs/heads/main | 2023-08-29T10:22:02.327369 | 2021-09-30T08:45:57 | 2021-09-30T08:45:57 | 390,989,406 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 862 | py | from dataclasses import dataclass
from typing import Any, List, Optional, TypeVar, Type, cast, Callable
from App.Json_Class.IOTag_dto import IOTag
from App.Json_Class.DtoUtilities import *
from App.Json_Class.TCPdeviceProperties_dto import TCPdeviceProperties
@dataclass
class TCPdevice:
properties: TCPdeviceProperties
IOTags: List[IOTag]
@staticmethod
def from_dict(obj: Any) -> 'TCPdevice':
assert isinstance(obj, dict)
properties = TCPdeviceProperties.from_dict(obj.get("properties"))
IOTags = from_list(IOTag.from_dict, obj.get("IO Tags"))
return TCPdevice(properties, IOTags)
def to_dict(self) -> dict:
result: dict = {"properties": to_class(TCPdeviceProperties, self.properties),
"IO Tags": from_list(lambda x: to_class(IOTag, x), self.IOTags)}
return result
| [
"p.jacobpaul07@gmail.com"
] | p.jacobpaul07@gmail.com |
e4c2f3156d33f3befa11455e042d24a1c9ee39bd | f3d3b3ea381f937109a9ef8c230078c35e5c8b65 | /Ether/upload/src/test/LexerSuite.py | e10206c3d5dc2993bd58af6a3c69fb58b84cc4f4 | [] | no_license | TanDung2512/parserSmartContractPython | e268fa8d2971250aab1cd5f87638d19e70c2d8e3 | 6b6a154004e8d0b9957e9c96b99a4b0d28888e37 | refs/heads/master | 2020-04-26T18:07:02.905020 | 2019-03-04T11:46:37 | 2019-03-04T11:46:37 | 173,734,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | import unittest
from TestUtils import TestLexer
class LexerSuite(unittest.TestCase):
def test_lơercase_identifier(self):
"""test identifiers"""
self.assertTrue(TestLexer.test("abc","abc,<EOF>",101))
| [
"kimtoan1998@gmail.com"
] | kimtoan1998@gmail.com |
12e50b360422f0d643d536e64911172c29b93a33 | 35fa34a9bfc8e8d87d033ac556a0a08a7a55ce16 | /crawl_frame/models/ocean_models.py | f178bb1e2dbe89585c93e5ce669fbeea1bda692b | [] | no_license | yinglingxianghen/spiders_business | 2b923bf502ec1bd8706951c1dea354345ee96c45 | 7e8e6ec911d30a66dc26d825940c78e43b39de14 | refs/heads/master | 2022-07-11T14:01:13.052718 | 2020-05-11T03:49:18 | 2020-05-11T03:49:18 | 262,930,639 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,733 | py | from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Integer
from sqlalchemy.orm import sessionmaker
engine = create_engine("mysql+pymysql://root:111111@127.0.0.1:3306/oceans",encoding="utf-8", echo=True, max_overflow=5)
# engine = create_engine("mysql+pymysql://root:111111@127.0.0.1:3306/oceans",encoding="utf-8", echo=True, max_overflow=5)
# 连接mysql数据库,echo为是否打印结果
Base = declarative_base() # 生成orm基类
class Ocean_Test_Adgs(Base):
__tablename__ = "ocean_test_adgs"
id = Column(Integer, primary_key=True,autoincrement=True)
budget = Column(String(64),comment="组预算")
show_cnt = Column(String(64),comment="展示")
click_cnt = Column(String(64),comment="点击数")
stat_cost = Column(String(64),comment="花费")
convert_cnt = Column(String(64),comment="转化数")
cpc_platform = Column(String(64),comment="cpc")
cpm_platform = Column(String(64),comment="cpm")
conversion_cost = Column(String(64),comment="转化成本")
conversion_rate = Column(String(64),comment="转化率")
click_start_rate = Column(String(64),comment="点击率")
campaign_name = Column(String(64),comment="广告组名称")
campaign_status = Column(Integer(),comment="广告组状态")
landing_type_name = Column(String(64),comment="推广目的")
# Base.metadata.create_all(engine)
# DBSession = sessionmaker(bind=engine)
# session = DBSession()
# newuser0=Admins111111(username="a",password="aaa")
# session.add(newuser0)
# session.commit()
# print("chengg")
# 父类Base调用所有继承他的子类来创建表结构
# if __name__ == '__main__':
# 创建表结构 | [
"1026719936@qq.com"
] | 1026719936@qq.com |
b27239657a5741c26fc636ccfde4758a19cdea07 | 4e8e9ed2a8fb69ed8b46066a8d967e4c107013a4 | /main/auth/reddit.py | b4e81c58ba20f36a1a1a70b8a93f407dda2e0712 | [
"MIT"
] | permissive | welovecoding/vote4code | a57b3d155096d362dca47587ad2985b4201ef036 | be265d553af35dc6c5322ecb3f7d5b3cf7691b75 | refs/heads/master | 2021-08-11T22:46:40.884030 | 2019-11-15T16:15:05 | 2019-11-15T16:15:05 | 90,191,931 | 14 | 0 | MIT | 2021-08-10T22:50:49 | 2017-05-03T20:46:02 | Python | UTF-8 | Python | false | false | 2,613 | py | # coding: utf-8
import base64
from flask_oauthlib import client
from werkzeug import urls
import flask
import auth
import config
import model
import util
from main import app
reddit_config = dict(
access_token_method='POST',
access_token_params={'grant_type': 'authorization_code'},
access_token_url='https://ssl.reddit.com/api/v1/access_token',
authorize_url='https://ssl.reddit.com/api/v1/authorize',
base_url='https://oauth.reddit.com/api/v1/',
consumer_key=model.Config.get_master_db().reddit_client_id,
consumer_secret=model.Config.get_master_db().reddit_client_secret,
request_token_params={'scope': 'identity', 'state': util.uuid()},
)
reddit = auth.create_oauth_app(reddit_config, 'reddit')
def reddit_handle_oauth2_response():
access_args = {
'code': flask.request.args.get('code'),
'client_id': reddit.consumer_key,
'redirect_uri': flask.session.get('%s_oauthredir' % reddit.name),
}
access_args.update(reddit.access_token_params)
auth_header = 'Basic %s' % base64.b64encode(
('%s:%s' % (reddit.consumer_key, reddit.consumer_secret)).encode('latin1')
).strip().decode('latin1')
response, content = reddit.http_request(
reddit.expand_url(reddit.access_token_url),
method=reddit.access_token_method,
data=urls.url_encode(access_args),
headers={
'Authorization': auth_header,
'User-Agent': config.USER_AGENT,
},
)
data = client.parse_response(response, content)
if response.code not in (200, 201):
raise client.OAuthException(
'Invalid response from %s' % reddit.name,
type='invalid_response', data=data,
)
return data
reddit.handle_oauth2_response = reddit_handle_oauth2_response
@app.route('/api/auth/callback/reddit/')
def reddit_authorized():
response = reddit.authorized_response()
if response is None or flask.request.args.get('error'):
flask.flash('You denied the request to sign in.')
return flask.redirect(util.get_next_url())
flask.session['oauth_token'] = (response['access_token'], '')
me = reddit.request('me')
user_db = retrieve_user_from_reddit(me.data)
return auth.signin_user_db(user_db)
@reddit.tokengetter
def get_reddit_oauth_token():
return flask.session.get('oauth_token')
@app.route('/signin/reddit/')
def signin_reddit():
return auth.signin_oauth(reddit)
def retrieve_user_from_reddit(response):
auth_id = 'reddit_%s' % response['id']
user_db = model.User.get_by('auth_ids', auth_id)
if user_db:
return user_db
return auth.create_user_db(
auth_id=auth_id,
name=response['name'],
username=response['name'],
)
| [
"lipiridis@gmail.com"
] | lipiridis@gmail.com |
59bcf1a18ede02687f34e00ecf37f4912b59fe5e | e878866c0e3bea4c8c24da2629e1c8f65135c59c | /ProyectoAutomation/New folder/ejercicio3.py | 81563ae26c5c2150dff8e941aac23932e00709a6 | [] | no_license | KariAltamirano/ProyectoAutomation | 4aaf6b66ddb80d26893354ae4ae596803dfebf67 | 631e5c4c65a21b17e23398792d6939b48aa987da | refs/heads/main | 2023-07-18T07:20:48.756419 | 2021-09-09T03:59:22 | 2021-09-09T03:59:22 | 403,185,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,785 | py | def calcular_precio(marcas,puertas,color,ventas):
marcas = {'ford':100000, 'chevrolet':120000, 'fiat':80000}
colores = {'blanco':10000, 'azul':20000, 'negro':30000}
puertas = {2:50000,4:65000,5:78000}
precio = marcas[marca]+ colores [color]+ puertas[puerta]
if ventas > 5 and ventas <11:
precio= precio*0.9
elif ventas >10 and ventas <51:
precio = precio *0.85
elif ventas >10 and ventas <51:
precio = precio *0.85
elif ventas >50:
precio = precio *0.82
return precio
mas_clientes = 'si'
ventas = []
marcas =['ford', 'chevrolet' , 'fiat']
puertas =[2,4,5]
colores = ['blanco', 'azul' , 'negro']
while mas_clientes == 'si':
nombre = input('Ingrese nombre: ')
apellido = input('Ingrese el apellido: ')
marca= ''
puerta = 0
color = ''
while marca not in marcas:
marca = input('Ingrese la marca: ')
marca=marca.lower()
while puerta not in puertas:
puerta = int(input('ingrese puertas: '))
while color not in colores:
color = input('ingrese el color: ')
color = color.lower()
#precio = calcular_precio(marca,puerta,color)
ventas.append({'nombre':nombre, 'apellido':apellido, 'marca':marca, 'puertas':puerta, 'color':color})
mas_clientes = input('Hay mas clientes?: ')
largo = len(ventas)
for i in ventas:
precio = calcular_precio(marcas,puertas,color,largo)
#largo es la cant de ventas que voy a tener
print("La persona: "+ i ['nombre']+" "+ i ['apellido']+
" compro un auto marca "+ i ['marca'] +" de "+ str(i ['puertas'])+" puertas y color "+ i ['color'] +" con un precio de $"+ str(precio))
| [
"noreply@github.com"
] | KariAltamirano.noreply@github.com |
2ba40809f529128b9eee73cf5c7dcd8a5ac062cf | 877ee8bbd74479f11fe98b987c77bed0586da0fa | /src/utils/send_event.py | c6a047673e6900e0f3e80bc378129437bd193d07 | [] | no_license | HTD-Health/ai-smart-mirror | e33b219593ef12e1ff37d08f329440e12949616f | 5375ca64ac4b10e3efc0c136418e417c06619801 | refs/heads/main | 2023-07-20T16:01:04.948637 | 2021-08-16T13:32:34 | 2021-08-16T13:38:12 | 327,571,598 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,323 | py | import argparse
import time
import zmq
import xdrlib
def main(host, port, topic, data):
"""
Sending events to the specific topic after connecting to the host.
Parameters:
host (str): host name to connect.
port (str): port of the host to connect.
topic (int): ID of topic to subscribe.
data (str): data to be send to topic.
"""
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.connect("tcp://%s:%s" % (host, port))
data_packer = xdrlib.Packer()
data_packer.pack_uint(topic)
data_packer.pack_bytes(data.encode('utf-8'))
# give some time to connect
time.sleep(1)
print(f'sending: {data_packer.get_buffer()}')
socket.send(data_packer.get_buffer())
if __name__ == "__main__":
# Creates Argument Parser object named parser
parser = argparse.ArgumentParser(description='Send Event to bus server')
# Set arguments
parser.add_argument(
'--port',
default="5555",
help='Port to which connect to'
)
parser.add_argument(
'topic',
type=int,
help='topic id'
)
parser.add_argument(
'data',
help='data to be send to topic'
)
args = parser.parse_args()
# Run module
main('127.0.0.1', args.port, args.topic, args.data)
| [
"noreply@github.com"
] | HTD-Health.noreply@github.com |
9388ed6505d0881d0e65812e0362e9978ec0feb0 | bb150497a05203a718fb3630941231be9e3b6a32 | /framework/e2e/jit/test_conv3d_transpose_13.py | ed625c225a1cb9bf00eec92280375ae7f4468a6a | [] | no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 641 | py | #!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test jit cases
"""
import os
import sys
sys.path.append(os.path.abspath(os.path.dirname(os.getcwd())))
sys.path.append(os.path.join(os.path.abspath(os.path.dirname(os.getcwd())), "utils"))
from utils.yaml_loader import YamlLoader
from jittrans import JitTrans
yaml_path = os.path.join(os.path.abspath(os.path.dirname(os.getcwd())), "yaml", "nn.yml")
yml = YamlLoader(yaml_path)
def test_conv3d_transpose_13():
"""test conv3d_transpose_13"""
jit_case = JitTrans(case=yml.get_case_info("conv3d_transpose_13"))
jit_case.jit_run()
| [
"825276847@qq.com"
] | 825276847@qq.com |
0de59a9aa89e91f8a13f2348a8538cb213bac16f | 4fd581daa0c21ac36c19da710c85204b7608ac08 | /MyPizzas/piz/models.py | c829cddacd48650dd3783744a97e501049dffc20 | [] | no_license | trilok002/myPizza | a3c98ef7d80099bb82fa0b8739cc796768726629 | 1a7dd5636ae77f6b79563dc5ce6af685cd07467a | refs/heads/master | 2023-03-09T18:22:21.785280 | 2021-03-01T12:14:55 | 2021-03-01T12:14:55 | 343,404,456 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | from djongo import models
# Create your models here.
class Pizza(models.Model):
type=models.CharField(max_length=20,null=True)
size=models.CharField(max_length=50,null=True)
toppings=models.TextField(null=True) | [
"61768477+trilok002@users.noreply.github.com"
] | 61768477+trilok002@users.noreply.github.com |
42bce4085193456af583fe4bd69f5b879e5fe92f | a39224fcd17ff2adb77fa643afed63bc3342a3f4 | /setup.py | e8128dd9f0742381369839c237e8c5bf807d6ee0 | [
"MIT"
] | permissive | HemuManju/reaction-time-classification | ef9ddb241803a16b4b9411eaa8375e8b25fcc9e1 | 8d468516c0591359e082fb8bc5850f8e89e5a6e4 | refs/heads/master | 2023-01-14T09:10:04.142946 | 2021-09-22T19:49:32 | 2021-09-22T19:49:32 | 179,614,766 | 0 | 0 | MIT | 2022-12-27T15:36:26 | 2019-04-05T03:39:43 | Python | UTF-8 | Python | false | false | 261 | py | from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Classification of reaction time of an \
operator performing tele-operation',
author='Hemanth ',
license='MIT',
)
| [
"hemanthm2277@gmail.com"
] | hemanthm2277@gmail.com |
96c06261331dd8e373f7552b6c08266873eeb256 | ebe994a845740ff5b8951a1ad9f0eddf6ebab508 | /pokeSite/pokeSite/settings.py | c329a1888c6b7559e8c9da5cede45fd6c2273c6d | [] | no_license | brando-squire/Hack_Pokedex_app | 997c60cfc66b7a89a518df14b2fadbaae8abc107 | 1116487394e60cb26786ae20948330ab28632bad | refs/heads/master | 2020-04-07T01:06:49.257262 | 2018-11-17T19:42:21 | 2018-11-17T19:42:21 | 157,930,997 | 0 | 0 | null | 2018-11-17T18:46:58 | 2018-11-16T23:04:28 | JavaScript | UTF-8 | Python | false | false | 3,128 | py | """
Django settings for pokeSite project.
Generated by 'django-admin startproject' using Django 2.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kiyjff@b=1+azn!2wjg#2v+_)efu7xvdi^+q=!)m(253459=-='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'pokedex.apps.PokedexConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pokeSite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pokeSite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"squiresbrandon@yahoo.com"
] | squiresbrandon@yahoo.com |
1463290d7d0b564ec4fce23796ea7e90f1adea53 | fbb1550dc5437d672ed0137bd7711eba3290dee3 | /students/ganyoa/lessons/lesson01/assignment/inventory_management/electric_appliances_class.py | 5b6a4aebc903f56735730c5623a3178938bba064 | [] | no_license | JavaRod/SP_Python220B_2019 | 2cc379daf5290f366cf92dc317b9cf68e450c1b3 | 5dac60f39e3909ff05b26721d602ed20f14d6be3 | refs/heads/master | 2022-12-27T00:14:03.097659 | 2020-09-27T19:31:12 | 2020-09-27T19:31:12 | 272,602,608 | 1 | 0 | null | 2020-06-16T03:41:14 | 2020-06-16T03:41:13 | null | UTF-8 | Python | false | false | 1,066 | py | '''Electric appliances class'''
from inventory_management.inventory_class import Inventory
#from inventory_class import Inventory
class ElectricAppliances(Inventory):
'''
initiates standard 'Inventory' attributes including brand and voltage for appliances
'''
def __init__(self, product_code, description, market_price, rental_price, brand, voltage):
'''Creates common instance variables from the parent class'''
Inventory.__init__(self, product_code, description, market_price, rental_price)
self.brand = brand
self.voltage = voltage
def return_as_dictionary(self):
'''
returns instance attributes in dictionary
'''
output_dict = {}
output_dict['product_code'] = self.product_code
output_dict['description'] = self.description
output_dict['market_price'] = self.market_price
output_dict['rental_price'] = self.rental_price
output_dict['brand'] = self.brand
output_dict['voltage'] = self.voltage
return output_dict
| [
"ganyoa@gmail.com"
] | ganyoa@gmail.com |
018f409190587b0f73f81fbc955a1fe528f94f44 | 1dfe57ecf168d954b38248f47bca080c8bb789d3 | /djangoenv/bin/pip | 34f5d03d3441d7c12d867e340ab3a3b70cb95d13 | [] | no_license | deb133/mysite | ab01cb053f2eac6675bde3eb429aa26def0bb76f | 1d465bbaaa736eecdf85231478f6cf5f05637726 | refs/heads/master | 2016-08-07T11:49:58.517062 | 2015-03-11T02:09:56 | 2015-03-11T02:09:56 | 31,602,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | #!/Users/DarcyElizabeth/Python/mysite/djangoenv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"darcy.e.balcarce@gmail.com"
] | darcy.e.balcarce@gmail.com | |
4aa5cc6c5f3ee72492708473c20d3ece613db6cb | 7a14adcd5150c970f6a5f1ab83180f091dc0f6cc | /items/migrations/0002_item_origin.py | 4fce527f993c1c330608ba087e0f33e0e273f96e | [] | no_license | synnea/the-modern-witcher | 0b4349e845e12e05f7abf6e0688200a6f54dd96d | f85c4de1fb3167b5c595ac6843c33a55495d7259 | refs/heads/master | 2022-12-09T15:09:23.602406 | 2020-03-05T18:17:27 | 2020-03-05T18:17:27 | 230,802,118 | 0 | 1 | null | 2022-11-22T05:13:41 | 2019-12-29T20:52:55 | HTML | UTF-8 | Python | false | false | 380 | py | # Generated by Django 2.2 on 2020-01-07 14:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('items', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='item',
name='origin',
field=models.CharField(blank=True, max_length=100),
),
]
| [
"carrie.poell@gmail.com"
] | carrie.poell@gmail.com |
de82ab414d200f43559d8f8c2c19e5dce45ff2a1 | b3241816119b6440b50e44a08440bcd22ebc6ad6 | /tests/test_gnn.py | 9a657d2a31870b361c7de9d4f876246c3bcf1e22 | [
"Apache-2.0"
] | permissive | maniospas/pygrank | 8cf7a8c6ca3dc4c1fecdba3994e6d59121d5faba | df1489763c55ecab764671f0d80635d989aa1139 | refs/heads/master | 2023-03-05T14:15:04.763234 | 2023-02-26T21:41:50 | 2023-02-26T21:41:50 | 210,811,921 | 1 | 0 | Apache-2.0 | 2019-09-25T09:54:51 | 2019-09-25T09:54:50 | null | UTF-8 | Python | false | false | 5,679 | py | import pygrank as pg
import tensorflow as tf
import torch
import pytest
def test_gnn_errors():
graph, features, labels = pg.load_feature_dataset('synthfeats')
training, test = pg.split(list(range(len(graph))), 0.8)
training, validation = pg.split(training, 1 - 0.2 / 0.8)
from tensorflow.keras.layers import Dropout, Dense
from tensorflow.keras.regularizers import L2
class APPNP(tf.keras.Sequential):
def __init__(self, num_inputs, num_outputs, hidden=64):
super().__init__([
Dropout(0.5, input_shape=(num_inputs,)),
Dense(hidden, activation="relu", kernel_regularizer=L2(1.E-5)),
Dropout(0.5),
Dense(num_outputs, activation="relu")])
self.ranker = pg.PageRank(0.9, renormalize=True, assume_immutability=True,
use_quotient=False, error_type="iters", max_iters=10) # 10 iterations
def call(self, features, graph, training=False):
predict = super().call(features, training=training)
propagate = self.ranker.propagate(graph, predict, graph_dropout=0.5 if training else 0)
return tf.nn.softmax(propagate, axis=1)
model = APPNP(features.shape[1], labels.shape[1])
with pytest.raises(Exception):
pg.gnn_train(model, graph, features, labels, training, validation, test=test, epochs=2)
pg.load_backend('tensorflow')
pg.gnn_train(model, features, graph, labels, training, validation, test=test, epochs=300, patience=2)
predictions = model(features, graph)
pg.load_backend('numpy')
with pytest.raises(Exception):
pg.gnn_accuracy(labels, predictions, test)
def test_appnp_tf():
from tensorflow.keras.layers import Dropout, Dense
from tensorflow.keras.regularizers import L2
class APPNP(tf.keras.Sequential):
def __init__(self, num_inputs, num_outputs, hidden=64):
super().__init__([
Dropout(0.5, input_shape=(num_inputs,)),
Dense(hidden, activation="relu", kernel_regularizer=L2(1.E-5)),
Dropout(0.5),
Dense(num_outputs, activation="relu")])
self.ranker = pg.ParameterTuner(
lambda par: pg.GenericGraphFilter([par[0] ** i for i in range(int(10))],
error_type="iters", max_iters=int(10)),
max_vals=[0.95], min_vals=[0.5], verbose=False,
measure=pg.Mabs, deviation_tol=0.1, tuning_backend="numpy")
def call(self, features, graph, training=False):
predict = super().call(features, training=training)
propagate = self.ranker.propagate(graph, predict, graph_dropout=0.5 if training else 0)
return tf.nn.softmax(propagate, axis=1)
graph, features, labels = pg.load_feature_dataset('synthfeats')
training, test = pg.split(list(range(len(graph))), 0.8)
training, validation = pg.split(training, 1 - 0.2 / 0.8)
model = APPNP(features.shape[1], labels.shape[1])
with pg.Backend('tensorflow'): # pygrank computations in tensorflow backend
graph = pg.preprocessor(renormalize=True, cors=True)(graph) # cors = use in many backends
pg.gnn_train(model, features, graph, labels, training, validation,
optimizer=tf.optimizers.Adam(learning_rate=0.01), verbose=True, epochs=50)
assert float(pg.gnn_accuracy(labels, model(features, graph), test)) == 1. # dataset is super-easy to predict
def test_appnp_torch():
graph, features, labels = pg.load_feature_dataset('synthfeats')
training, test = pg.split(list(range(len(graph))), 0.8)
training, validation = pg.split(training, 1 - 0.2 / 0.8)
class AutotuneAPPNP(torch.nn.Module):
def __init__(self, num_inputs, num_outputs, hidden=64):
super().__init__()
self.layer1 = torch.nn.Linear(num_inputs, hidden)
self.layer2 = torch.nn.Linear(hidden, num_outputs)
self.activation = torch.nn.ReLU()
self.dropout = torch.nn.Dropout(0.5)
self.num_outputs = num_outputs
self.ranker = pg.ParameterTuner(
lambda par: pg.GenericGraphFilter([par[0] ** i for i in range(int(10))],
error_type="iters", max_iters=int(10)),
max_vals=[0.95], min_vals=[0.5], verbose=False,
measure=pg.Mabs, deviation_tol=0.1, tuning_backend="numpy")
def forward(self, features, graph, training=False):
predict = self.dropout(torch.FloatTensor(features))
predict = self.dropout(self.activation(self.layer1(predict)))
predict = self.activation(self.layer2(predict))
predict = self.ranker.propagate(graph, predict, graph_dropout=0.5 if training else 0)
ret = torch.nn.functional.softmax(predict, dim=1)
self.loss = 0
for param in self.layer1.parameters():
self.loss = self.loss + 1E-5*torch.norm(param)
return ret
def init_weights(m):
if isinstance(m, torch.nn.Linear):
torch.nn.init.xavier_uniform_(m.weight)
m.bias.data.fill_(0.01)
model = AutotuneAPPNP(features.shape[1], labels.shape[1])
graph = pg.preprocessor(renormalize=True, cors=True)(graph)
model.apply(init_weights)
with pg.Backend('pytorch'):
pg.gnn_train(model, features, graph, labels, training, validation, epochs=50)
# TODO: investigate why this is not working as well as tf
#assert float(pg.gnn_accuracy(labels, model(features, graph), test)) == 0.5
| [
"maniospas@hotmail.com"
] | maniospas@hotmail.com |
a3bc969b5283c5f611660bb173b2d3769ae854c3 | 2a68b03c923119cc747c4ffcc244477be35134bb | /interviews/A/VO/wordLadderII.py | a00dffb9d9e8ec178fca30545a4ec9ff564ba284 | [] | no_license | QitaoXu/Lintcode | 0bce9ae15fdd4af1cac376c0bea4465ae5ea6747 | fe411a0590ada6a1a6ae1166c86c585416ac8cda | refs/heads/master | 2020-04-24T20:53:27.258876 | 2019-09-24T23:54:59 | 2019-09-24T23:54:59 | 172,259,064 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,500 | py | from collections import deque
class Solution:
"""
@param: start: a string
@param: end: a string
@param: dict: a set of string
@return: a list of lists of string
"""
def findLadders(self, start, end, dict):
# write your code here
dict.add(start)
dict.add(end)
distance = {}
self.bfs(end, start, dict, distance)
results = []
path = [start]
self.dfs(start, end, path, dict, distance, results)
return results
def bfs(self, start, end, wordDict, distance):
queue = deque()
queue.append(start)
distance[start] = 0
while queue:
size = len(queue)
for _ in range(size):
word = queue.popleft()
for next_word in self.get_next_words(word):
if next_word not in wordDict:
continue
if next_word in distance:
continue
queue.append(next_word)
distance[next_word] = distance[word] + 1
def get_next_words(self, word):
next_words = []
for i in range(len(word)):
left, right = word[: i], word[i + 1:]
for c in "abcdefghijklmnopqrstuvwxyz":
if c == word[i]:
continue
next_word = left + c + right
next_words.append(next_word)
return next_words
def dfs(self, curt, target, path, wordDict, distance, results):
if curt == target:
results.append(path.copy())
return
for next_word in self.get_next_words(curt):
if next_word not in wordDict:
continue
if distance[next_word] != distance[curt] - 1:
continue
path.append(next_word)
self.dfs(next_word, target, path, wordDict, distance, results)
path.pop()
| [
"jeremyxuqitao@outlook.com"
] | jeremyxuqitao@outlook.com |
658637053dd75b704a6f500e279e42fd4a536cf6 | 2231cbaf3b3a4a67b8f25bc7f4150f86b28981d4 | /test/hw4/arithmetic/test7/test7.py | 47d2e11edfbe5ddb450631a472d8a8c4d07fca7c | [] | no_license | TLeben/compilerContruction | 2bd4c17a7682f0a75fb162864856179cf4d1a917 | 90d5ff9b115b066dab7ed98dc7772a8b6f6cab98 | refs/heads/master | 2016-08-04T20:36:03.416893 | 2015-04-09T04:36:39 | 2015-04-09T04:36:39 | 29,304,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | #!/usr/bin/env python
a = 1
b = 2
c = 3
d = 4
e = 5
f = 6
g = 7
h = 8
i = 9
j = 10
print a + b + c + d + e + f + g + h + i + j
| [
"schweikm@gmail.com"
] | schweikm@gmail.com |
352ddf87ea2fdb4df2a799b35d0b72ef0da338c3 | b59a2ad40efead9850c2c9cbff93b2a00c6c47bb | /app/__init__.py | c5833da6e81fbcc2b9193c1bf3248a77a4f678d3 | [] | no_license | khaled-sayed/news_app_python_flask | 67704bb37e0a4dd32d2b7a26d22671d74f59aec1 | 440cc3d0915e79b601b0154d4d13912cd1c2aebd | refs/heads/main | 2023-01-03T21:53:38.227252 | 2020-11-04T15:21:56 | 2020-11-04T15:21:56 | 310,038,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,465 | py | from flask import Flask, render_template
from dotenv import load_dotenv
import config
import os
from flask_login import LoginManager
from werkzeug.utils import secure_filename
base = os.path.dirname(os.path.abspath(__name__))
UPLOAD_FOLDER = 'app/static/images/upload/news'
# ALLOWED_EXTINSTIONS = set(['jpeg','jpg','png'])
app = Flask(__name__)
APP_ROOT = os.path.join(os.path.dirname(__file__), "..")
dontenv_path = os.path.join(APP_ROOT, ".env")
load_dotenv(dontenv_path)
app.config.from_object('config.settings.'+ os.environ.get('FLASK_ENV'))
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
test = app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
from app.models import db
from app.models.admins import Admin
from app.models.categories import Categorie
from app.models.posts import Post
from app.models.users import User
db.create_all()
db.session.commit()
login_manger = LoginManager()
login_manger.login_view = 'auth.admin_login'
login_manger.init_app(app)
# Create Page Not Found
@app.errorhandler(404)
def not_found(error):
return render_template('errors/404.html'), 404
@login_manger.user_loader
def load_user(user_id):
return Admin.query.get(int(user_id))
# Register BluePrints
from app.views.auth import auth as auth_blueprint
from app.views.dashboard import dash as dash_blueprint
from app.views.home import home as home_blueprint
app.register_blueprint(auth_blueprint)
app.register_blueprint(dash_blueprint)
app.register_blueprint(home_blueprint) | [
"62744648+khaled-sayed@users.noreply.github.com"
] | 62744648+khaled-sayed@users.noreply.github.com |
67323a6cd63e9a194cb69a9c34a95d333bb40913 | 393f5e709700153dd8a4dbb4de229126a44fff43 | /cage/settings/local.py | 0fa4d63fd04ce97e4b8e9b2d444bc2b30378f6da | [] | no_license | CNcage/SCP | ec142bd536f3abc09a683e97e19a1ee48f4e8494 | 2ee0778774c9258a14e4d64f22d04537ea3dd7a5 | refs/heads/master | 2022-11-21T04:03:35.015907 | 2020-07-21T16:05:43 | 2020-07-21T16:05:43 | 279,333,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | from decouple import config
from .base import * # noqa
SECRET_KEY = config("SECRET_KEY")
DEBUG = config("DEBUG", cast=bool)
DATABASES = { "default": { "ENGINE": config("DB_ENGINE"),
"NAME": config("DB_NAME"), "USER": config("DB_USER"),
"PASSWORD": config("DB_PASSWD"),
"HOST": config("DB_HOST"),
"PORT": config("DB_PORT", cast=int), }
} | [
"info@jmkit.com"
] | info@jmkit.com |
56b56b28494fd3e8c37f0d64125888e7aade5cf1 | d91391e7f093709fa90129f2d6bdfdb139213582 | /db/sms_pending.py | cd3a57d9b0bcaec4a40dcba6f4af7f59b8f8ad29 | [] | no_license | nomo-w/sms | f0f678dde12928963c0c341df2152a902d06bd5d | f7e8a0f6cdc8c2deee34a5e403245731e6c1e25c | refs/heads/main | 2023-01-23T19:45:53.855664 | 2020-12-07T09:07:21 | 2020-12-07T09:07:21 | 277,744,520 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,595 | py | # coding: utf-8
# 发送短信历史数据库
from config import Sql
from db.base import DBbase
from pymysql import escape_string
from db.history import SmsHistoryDB
from db.statistics import StatisticsDB
from db.cache_statistics import CacheStatisticsDB
import datetime
import random
import time
"""
CREATE TABLE `sms_pending` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user` char(11) NOT NULL,
`user_id` int(11) DEFAULT -1,
`channel_id` int(11) NOT NULL,
`plateform_id` int(11) NOT NULL,
`to_number` char(100) DEFAULT NULL,
`text` char(200) DEFAULT NULL,
`time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`price` decimal(15,2) DEFAULT 0.00,
`description` char(50) DEFAULT 'pending',
`message_id` varchar(50) DEFAULT '0' COMMENT '唯一id',
`is_click` char(10) DEFAULT '未设置',
`callback_id` int(11) DEFAULT 0,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
"""
class SmsPendingDB(DBbase):
"""发送等待返回结果表"""
def __init__(self):
super().__init__()
def is_have_text(self, text):
sql = f'select message_id from sms_pending where text="{text}" and message_id!="{Sql.kl_message_id}" limit 1;'
r = self.execute(sql, fetch=True)
return r[0][0] if r else None
def search_manual(self, message_id):
channel_id_sql = 'channel_id=(select id from sms_channel where channel_type="manual")'
if message_id in ['null', None, '']:
sql = f'select distinct message_id from sms_pending where {channel_id_sql} and ' \
f'message_id!="{Sql.kl_message_id}";'
r = []
for i in self.execute(sql, fetch=True):
_ = self.execute(f'select count(to_number),text from sms_pending where message_id="{i[0]}";', fetch=True)
r.append(_[0] + (i[0],))
else:
# text = escape_string(text)
sql = f'select to_number from sms_pending where {channel_id_sql} and message_id="{message_id}";'
r = self.execute(sql, fetch=True)
return r if r else []
def update_kl(self, message_id):
sql = f'select id,user,user_id,channel_id,plateform_id,to_number,text,is_click,callback_id,time from ' \
f'sms_pending where message_id="{message_id}";'
r = self.execute(sql, fetch=True)
for i in r:
if i:
i = list(i)
i[-1] = i[-1] + datetime.timedelta(seconds=+random.randint(1, 50))
keys = ['user', 'user_id', 'channel_id', 'plateform_id', 'to', 'text', 'is_click', 'callback', 'time_']
dic = dict(zip(keys, i[1:]))
# 扣钱
rate_sql = f'select rate from sms_rate where plateform_id={dic["plateform_id"]} and channel_id={dic["channel_id"]}'
price = self.execute(rate_sql + ';', fetch=True)[0][0]
c_b_sql = f'update sms_plateform set balance=balance-({price}) where id={dic["plateform_id"]};'
# c_b_sql = f'-- update sms_plateform set balance=balance-({price}) where balance>=({rate_sql}) and id={dic["plateform_id"]};'
self.execute(c_b_sql)
with SmsHistoryDB() as db:
db.add(**dic, message_id=message_id, price=price, description='success')
del_sql = f'delete from sms_pending where id={i[0]};'
self.execute(del_sql, commit=True)
return True
def add(self, user_id, message_id, to, text, channel_id, plateform_id=None, callback=0, is_click='未设置', user=None, commit=True):
"""
:param user: operator
:param message_id: unique message id
:param to: to-number
:param text: to-sms-text-body
:param price: sms-price / per
:param err: ret-code
:param err_text: ret error text
:return:
"""
new_text = escape_string(text)
if user is None:
user = f'(select user from sms_users where id={user_id})'
else:
user = f'"{user}"'
if plateform_id is None:
plateform_id = self.execute(f'select plateform_id from sms_users where id={user_id};', fetch=True)[0][0]
sql = f'insert into sms_pending (user,user_id,channel_id,plateform_id,to_number,text,message_id,' \
f'callback_id,is_click) values ({user},{user_id},{channel_id},{plateform_id},"{to}","{new_text}",' \
f'"{message_id}",{callback},"{is_click}");'
self.execute(sql, commit=commit)
with StatisticsDB() as db:
db.update_count(plateform_id, channel_id, "total_count", time.strftime("%Y-%m-%d", time.localtime()))
with CacheStatisticsDB() as db:
db.update_count(plateform_id, channel_id, "total_count", time.strftime("%Y-%m-%d", time.localtime()))
return True
def update(self, message_id, err, err_text, to=None):
# price 价格
# err 1为错误/
sql = f'select user,user_id,channel_id,plateform_id,to_number,text,is_click,callback_id,time from sms_pending ' \
f'where message_id="{message_id}"'
if to:
sql += f' and to_number="{to}";'
else:
sql += ';'
r = self.execute(sql, fetch=True)
if r:
keys = ['user', 'user_id', 'channel_id', 'plateform_id', 'to', 'text', 'is_click', 'callback', 'time_']
dic = dict(zip(keys, r[0]))
price = 0
if err == 0:
# 扣钱
rate_sql = f'select rate from sms_rate where plateform_id={dic["plateform_id"]} and channel_id={dic["channel_id"]}'
price = self.execute(rate_sql+';', fetch=True)[0][0]
c_b_sql = f'update sms_plateform set balance=balance-({price}) where id={dic["plateform_id"]};'
# c_b_sql = f'-- update sms_plateform set balance=balance-({price}) where balance>=({rate_sql}) and id={dic["plateform_id"]};'
self.execute(c_b_sql)
with SmsHistoryDB() as db:
db.add(**dic, message_id=message_id, price=price, description='success' if err == 0 else err_text)
del_sql = f'delete from sms_pending where message_id="{message_id}"'
if to:
del_sql += f' and to_number="{to}" limit 1;'
else:
del_sql += ' limit 1;'
return self.execute(del_sql, commit=True)
return False
| [
"1085640177@qq.com"
] | 1085640177@qq.com |
f3b0975173509a971b078616b1369cd06e6dbd22 | b5db98d382f587857cc56f71dfde17034963fdec | /image_search/signature_database_base.py | 7019b092d2372d185e499c5bbce04dc496472001 | [] | no_license | boluoyu/image_search | abb1563f35675730973bdcff6829da198f6c8d5e | 5675371130eca0849e215c426f51a6fdce66ed45 | refs/heads/master | 2020-07-17T16:12:08.287835 | 2017-04-18T09:51:09 | 2017-04-18T09:51:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,162 | py | from image_search.image_signature import ImageSignature
from operator import itemgetter
import numpy as np
from datetime import datetime
import os.path
class SignatureDatabaseBase(object):
"""Base class for storing and searching image signatures in a database
Note:
You must implement the methods search_single_record and insert_single_record
in a derived class
"""
def search_single_record(self, rec):
"""Search for a matching image record.
Must be implemented by derived class.
Args:
rec (dict): an image record. Will be in the format returned by
make_record
For example, rec could have the form:
{'path': 'https://pixabay.com/static/uploads/photo/2012/11/28/08/56/mona-lisa-67506_960_720.jpg',
'signature': [0.123456, 0.234567, ... ]
'metadata': {...},
}
Returns:
a formatted list of dicts representing matches.
For example, if three matches are found:
[
{'dist': 0.069116439263706961,
'id': u'AVM37oZq0osmmAxpPvx7',
'path': u'https://pixabay.com/static/uploads/photo/2012/11/28/08/56/mona-lisa-67506_960_720.jpg'},
{'dist': 0.22484320805049718,
'id': u'AVM37nMg0osmmAxpPvx6',
'path': u'https://upload.wikimedia.org/wikipedia/commons/thumb/e/ec/Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg/687px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg'},
{'dist': 0.42529792112113302,
'id': u'AVM37p530osmmAxpPvx9',
'metadata': {...},
'path': u'https://c2.staticflickr.com/8/7158/6814444991_08d82de57e_z.jpg'}
]
You can return any fields you like, but must include at least dist and id. Duplicate entries are ok,
and they do not need to be sorted
"""
raise NotImplementedError
def insert_single_record(self, rec):
"""Insert an image record.
Must be implemented by derived class.
Args:
rec (dict): an image record. Will be in the format returned by
make_record
For example, rec could have the form:
{'path': 'https://pixabay.com/static/uploads/photo/2012/11/28/08/56/mona-lisa-67506_960_720.jpg',
'signature': [0.123456, 0.234567, ... ]
'metadata': {...}
}
The number of simple words corresponds to the attribute N
"""
raise NotImplementedError
def __init__(self, distance_cutoff=0.095, save_path='../thumbnail', imgserver_ip = '127.0.0.1', imgserver_port = 9202,
*signature_args, **signature_kwargs):
"""Set up storage scheme for images
Args:
distance_cutoff (Optional [float]): maximum image signature distance to
be considered a match (default 0.095)
save_path (Optional): thumbnail save path
*signature_args: Variable length argument list to pass to ImageSignature
**signature_kwargs: Arbitrary keyword arguments to pass to ImageSignature
"""
# Check float input
if type(distance_cutoff) is not float:
raise TypeError('distance_cutoff should be a float')
if distance_cutoff < 0.:
raise ValueError('distance_cutoff should be > 0 (got %r)' % distance_cutoff)
self.distance_cutoff = distance_cutoff
self.save_path = save_path
self.gis = ImageSignature(*signature_args, **signature_kwargs)
self.imgserver_port = imgserver_port
self.imgserver_ip = imgserver_ip
def add_image(self, path, msg_id, pic_id, img=None, bytestream=False, metadata=None, refresh_after=False):
"""Add a single image to the database
Args:
path (string): path or identifier for image. If img=None, then path is assumed to be
a URL or filesystem path
msg_id (string): message id
pic_id (string): picture id
img (Optional[string]): usually raw image data. In this case, path will still be stored, but
a signature will be generated from data in img. If bytestream is False, but img is
not None, then img is assumed to be the URL or filesystem path. Thus, you can store
image records with a different 'path' than the actual image location (default None)
bytestream (Optional[boolean]): will the image be passed as raw bytes?
That is, is the 'path_or_image' argument an in-memory image? If img is None but, this
argument will be ignored. If img is not None, and bytestream is False, then the behavior
is as described in the explanation for the img argument
(default False)
metadata (Optional): any other information you want to include, can be nested (default None)
"""
rec = make_record(path, self.gis, self.imgserver_ip, self.imgserver_port, msg_id, pic_id, self.save_path, img=img, bytestream=bytestream, metadata=metadata)
self.insert_single_record(rec, refresh_after=refresh_after)
def search_image(self, path, bytestream=False):
"""Search for matches
Args:
path (string): path or image data. If bytestream=False, then path is assumed to be
a URL or filesystem path. Otherwise, it's assumed to be raw image data
bytestream (Optional[boolean]): will the image be passed as raw bytes?
That is, is the 'path_or_image' argument an in-memory image?
(default False)
Returns:
a formatted list of dicts representing unique matches, sorted by dist
For example, if three matches are found:
[
{'dist': 0.069116439263706961,
'id': u'AVM37oZq0osmmAxpPvx7',
'path': u'https://pixabay.com/static/uploads/photo/2012/11/28/08/56/mona-lisa-67506_960_720.jpg'},
{'dist': 0.0148712559918,
'id': u'AVM37nMg0osmmAxpPvx6',
'path': u'https://upload.wikimedia.org/wikipedia/commons/thumb/e/ec/Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg/687px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg'},
{'dist': 0.0307221687987,
'id': u'AVM37p530osmmAxpPvx9',
'path': u'https://c2.staticflickr.com/8/7158/6814444991_08d82de57e_z.jpg'}
]
"""
img = self.gis.preprocess_image(path, bytestream)
# generate the signature
record = make_record(img, self.gis, self.imgserver_ip, self.imgserver_port)
result = self.search_single_record(record)
ids = set()
unique = []
for item in result:
if item['id'] not in ids:
u_item = {}
# u_item['thumbnail'] = item['thumbnail']
# u_item['thumbnail'] = 'http://%s:%s/%s' % (self.imgserver_ip, self.imgserver_port, item['thumbnail'])
u_item['msg_id'] = item['msg_id']
u_item['pic_id'] = item['pic_id']
u_item['path'] = item['path']
u_item['dist'] = item['dist'][0]
unique.append(u_item)
ids.add(item['id'])
r = sorted(unique, key=itemgetter('dist'))
return r
def make_record(path, gis, imgserver_ip, imgserver_port, msg_id=None, pic_id=None, save_path=None, img=None, bytestream=False, metadata=None):
"""Makes a record suitable for database insertion.
Note:
This non-class version of make_record is provided for
CPU pooling. Functions passed to worker processes must
be picklable.
Args:
path (string): path or image data. If bytestream=False, then path is assumed to be
a URL or filesystem path. Otherwise, it's assumed to be raw image data
save_path: thumbnail save path
gis (ImageSignature): an instance of ImageSignature for generating the
signature
img (Optional[string]): usually raw image data. In this case, path will still be stored, but
a signature will be generated from data in img. If bytestream is False, but img is
not None, then img is assumed to be the URL or filesystem path. Thus, you can store
image records with a different 'path' than the actual image location (default None)
bytestream (Optional[boolean]): will the image be passed as raw bytes?
That is, is the 'path_or_image' argument an in-memory image? If img is None but, this
argument will be ignored. If img is not None, and bytestream is False, then the behavior
is as described in the explanation for the img argument
(default False)
metadata (Optional): any other information you want to include, can be nested (default None)
Returns:
An image record.
For example:
{'path': 'https://pixabay.com/static/uploads/photo/2012/11/28/08/56/mona-lisa-67506_960_720.jpg',
'signature': [0.123456, 0.234567, ... ]
'metadata': {...}
}
"""
cur_time = datetime.now()
if save_path != None:
thumbnail_path = os.path.abspath(save_path)
try:
if not os.path.exists(thumbnail_path):
os.makedirs(thumbnail_path)
except OSError:
raise TypeError('Make thumbnail path error.')
thumbnail_name = cur_time.strftime("%Y_%m_%d_%H_%M_%S_%f") + '.jpg'
thumbnail_path = os.path.join(thumbnail_path, thumbnail_name)
else:
thumbnail_path = None
record = dict()
record['path'] = path
if msg_id is not None:
record['msg_id'] = msg_id
if pic_id is not None:
record['pic_id'] = pic_id
if img is not None:
signature = gis.generate_signature(img, bytestream=bytestream)
else:
signature = gis.generate_signature(path, thumbnail_path=thumbnail_path)
record['signature'] = signature.tolist()
if metadata:
record['metadata'] = metadata
record['timestamp'] = cur_time
if thumbnail_path != None:
# record['thumbnail'] = 'http://%s:%s/%s'%(imgserver_ip, imgserver_port, thumbnail_name)
record['thumbnail'] = '%s' % (thumbnail_name)
else:
record['thumbnail'] = 'null'
return record
def normalized_distance(_target_array, _vec):
"""Compute normalized distance to many points.
Computes 1 - a * b / ( ||a|| * ||b||) for every a in target_array
Args:
_target_array (numpy.ndarray): N x m array
_vec (numpy.ndarray): array of size m
Returns:
the normalized distance (float)
"""
topvec = np.dot(_target_array, _vec.reshape(_vec.size, 1))
norm_a = np.linalg.norm(_target_array, axis=1)
norm_a = norm_a.reshape(norm_a.size,1)
norm_b = np.linalg.norm(_vec)
finvec = 1.0 - topvec / (norm_a * norm_b)
return finvec
| [
"guoxiaolu06@126.com"
] | guoxiaolu06@126.com |
7bdd5fad0213f6c34d28ec35b754a74ac25ce4db | 0d05c5e96e91986bbfeda648b502f585885a596c | /trekbooking/wsgi.py | 6bc6d65ef0a74cc3ade9ff8870050e907c71bdc5 | [] | no_license | callingsandesh/ComeOn | 9507e1eba984dca76a7342845394754b2e4f598d | f3c541a62fa79c81e167a661ec45b8ced2c5b4b2 | refs/heads/master | 2021-01-25T13:05:15.985282 | 2020-01-23T14:39:59 | 2020-01-23T14:39:59 | 123,523,896 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
WSGI config for trekbooking project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Must be set before get_wsgi_application() imports the settings module.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'trekbooking.settings')
# Module-level WSGI callable that application servers import and serve.
application = get_wsgi_application()
| [
"arvik.rai3377@gmail.com"
] | arvik.rai3377@gmail.com |
0d49f2b63100c3cb5e7ca83a6451e19ffa5f3676 | ecc17e230795f33d94ed33547b1d0f14c602b9ba | /HW12 (Ch04, HMM problem 1_Forward_Backward)/HW12.py | 4446294c4970e10df44507035199cda2add05e89 | [] | no_license | henk2525/NYUST-speech-recognition | 1e8a2159572c756152234f888b5814ddd7baf1ef | ea75d67d6c1d6a4db51064c00cae455961dd5b32 | refs/heads/master | 2022-05-07T12:50:39.224382 | 2019-08-04T12:38:55 | 2019-08-04T12:53:21 | 200,493,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,977 | py | # -*- coding: utf-8 -*-
"""
Created at 2019/4/28
@author: henk guo
"""
import numpy as np
import dspBox
# Load the three observation sequences from disk via the project's dspBox helper.
# The loops below index positions 0..49, so each file is assumed to hold at
# least 50 symbols -- TODO confirm against the observation files.
obs1 = dspBox.str2ndar(open('obser1.txt', 'r').read())
obs2 = dspBox.str2ndar(open('obser2.txt', 'r').read())
obs3 = dspBox.str2ndar(open('obser3.txt', 'r').read())
# Parameters for three candidate HMMs (3 hidden states, 3 emission symbols):
# aN = state-transition matrix, bN = emission matrix, piN = initial distribution.
a1 = np.array([[0.2, 0.7, 0.1], [0.1, 0.2, 0.7], [0.7, 0.1, 0.2]])
b1 = np.array([[0.5, 0.4, 0.1], [0.7, 0.2, 0.1], [0.7, 0.1, 0.2]])
pi1 = np.array([0.7, 0.2, 0.1])
a2 = np.array([[0.7, 0.2, 0.1], [0.3, 0.6, 0.1], [0.1, 0.2, 0.7]])
b2 = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1], [0.4, 0.5, 0.1]])
pi2 = np.array([0.1, 0.7, 0.2])
a3 = np.array([[0.2, 0.7, 0.1], [0.6, 0.3, 0.1], [0.2, 0.7, 0.1]])
b3 = np.array([[0.1, 0.2, 0.7], [0.2, 0.2, 0.6], [0.3, 0.1, 0.6]])
pi3 = np.array([0.2, 0.2, 0.6])
# Parallel lists so observation/model pairs can be indexed in the loops below.
obs, a, b, pi = [obs1, obs2, obs3], [a1, a2, a3], [b1, b2, b3], [pi1, pi2, pi3]
# Score every observation sequence against every model with both the forward
# and the backward algorithm; the two likelihoods should agree.
for obsi in range(3): # Index of observation
    print("obser" + str(obsi + 1))
    for mi in range(3): # Index of model
        # =====forward algorithm=====
        # forwardp[t, s] accumulates the forward probability of being in
        # state s at time t having emitted the first t+1 symbols.
        forwardp = np.zeros((50, 3))
        forwardp[0] = [pi[mi][state] * b[mi][state, obs[obsi][0]] for state in range(3)] # initial state
        for i in range(1, 50): # start, 1~49
            for state in range(3):
                for from_ in range(3):
                    forwardp[i, state] += forwardp[i-1, from_] * a[mi][from_, state] * b[mi][state, obs[obsi][i]]
        # Sequence likelihood = sum of forward probabilities at the last step.
        fp = np.sum(forwardp[-1])
        # =====backward algorithm=====
        # backwardp[t, s] accumulates the backward probability of emitting the
        # remaining symbols given state s at time t.
        backwardp = np.zeros((50, 3))
        backwardp[-1, :] = 1 # initial state
        for i in range(48, -1, -1): # start, 48~0
            for state in range(3):
                for to in range(3):
                    backwardp[i, state] += backwardp[i + 1, to] * a[mi][state, to] * b[mi][to, obs[obsi][i + 1]]
        # Fold the initial distribution and first emission into the t=0 column.
        bp = sum([pi[mi][state] * backwardp[0, state] * b[mi][state, obs[obsi][0]] for state in range(3)])
        print('model_{:d} forward:{:.6e} backward:{:.6e}'.format(mi, fp, bp))
"a0956072892@gmail.com"
] | a0956072892@gmail.com |
f87a9d541cd2f410d4566f9b6a619b6f8385df77 | b7319d10078c0fdf5d9ece02f976e64d19ff870e | /Mathematical/Multiply.py | 6a1eca5fdc9c2b25bd4a3683d8376cc2f0685d5a | [] | no_license | parhamrp/Python3 | 5a4e91941a411e52f252819fdcd65924c0466e10 | dba4e7b22c39e10dfee8f49faa24e6705ce894a5 | refs/heads/master | 2020-09-11T01:19:34.543569 | 2019-12-08T17:14:09 | 2019-12-08T17:14:09 | 221,892,561 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | # Write a Python function to multiply all the numbers in a list.
def multiply(lst):
    """Return the product of all the numbers in *lst*.

    An empty sequence yields 1, the multiplicative identity.
    """
    # (Removed stale commented-out indexing code that no longer matched the loop.)
    total = 1
    for item in lst:
        total *= item
    return total
# print(multiply([1,2,4,-3,3]))
| [
"noreply@github.com"
] | parhamrp.noreply@github.com |
2e1785d76fd4532236e24443c86dc1718f1136ed | 0e5bbc4eab562ae85c3e0972e5789dd2c14eb297 | /5.4_server.py | 772387cd4348bf536b82fc5ac4298df49aa43065 | [] | no_license | kint7/lab5 | c84d71a7b00b03a219d5c8fe7cb541d5ee0c29d2 | eb43c926a8d02f593ae7754a3d6e1feb31256833 | refs/heads/master | 2023-01-24T03:49:21.603390 | 2020-12-09T08:38:16 | 2020-12-09T08:38:16 | 317,923,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,518 | py | # Importing socket library
import socket
# Create a TCP socket object.
s = socket.socket()
# Choose one port and announce where the server is reachable.
host_name = socket.gethostname()
IPADDRESS = socket.gethostbyname(host_name)
PORT = 9898
print(">>>IP address of the server: ", IPADDRESS)
print(">>>Server is listening on port: ", PORT)
print("\n>Waiting for connection from a client...")
# Bind to the chosen port on all interfaces.
s.bind(('', PORT))
# Put the server into listening mode (backlog of up to 10 pending connections).
s.listen(10)
# We do not know when a client will contact the server, so keep listening.
while True:
    # Establish a connection with the client.
    conn, addr = s.accept()
    # Send a hello message to the client.
    msg = "\n\nHi, Client [IP address: "+ addr[0] + "], \nThank you for using our storage service. \nYour files are safe with us.\n-Server\n"
    conn.send(msg.encode())
    # The first payload from the client is the destination file name.
    # NOTE(review): the name is used verbatim in open(); a hostile client could
    # traverse paths (e.g. '../x') -- confirm whether it needs sanitising.
    filename = conn.recv(1024).decode("utf-8")
    file = open(filename, "wb")
    # Receive the file contents from the client side until the stream ends.
    RecvData = conn.recv(99999)
    while RecvData:
        file.write(RecvData)
        RecvData = conn.recv(99999)
    # Close the file opened at the server side once the copy is completed.
    file.close()
    print("\n>File has been copied successfully \n")
    # Close the connection with the client.
    conn.close()
    print(">Server closed the connection \n")
    # Leave the infinite loop: this server handles exactly one transfer.
    break
| [
"2018276758@isiswa.uitm.edu.my"
] | 2018276758@isiswa.uitm.edu.my |
1e73bcb3091075ebead0ba1e029588dec88fead0 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/46/usersdata/98/17294/submittedfiles/funcoes1.py | 94f0f1ec40294a695cc95ea950a44bec636efae5 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,592 | py | # -*- coding: utf-8 -*-
from __future__ import division
def crescente(lista):
    """Return True when *lista* is strictly increasing, pair by pair.

    An empty list yields False (zero rising pairs can never equal -1),
    a single-element list yields True.
    """
    rising_pairs = 0
    for pos in range(len(lista) - 1):
        if lista[pos] < lista[pos + 1]:
            rising_pairs = rising_pairs + 1
    return rising_pairs == len(lista) - 1
#escreva as demais funções
def decrescente(lista):
    """Return True when *lista* is strictly decreasing, pair by pair.

    An empty list yields False, a single-element list yields True.
    """
    falling_pairs = sum(
        1 for pos in range(len(lista) - 1) if lista[pos] > lista[pos + 1]
    )
    return falling_pairs == len(lista) - 1
def iguais(lista):
    """Return True when at least one pair of adjacent elements is equal."""
    return any(
        lista[pos] == lista[pos + 1] for pos in range(len(lista) - 1)
    )
#escreva o programa principal
# Main program. Relies on Python 2 input(), which evaluates the typed text,
# so n arrives as a number (the file also uses `from __future__ import division`).
n=input('Digite a quantidade de intens nas listas: ')
a=[]
b=[]
c=[]
# Read n values into each of the three lists.
for i in range(0,n,1):
    a.append(input('Digite um valor para a lista a: '))
for i in range(0,n,1):
    b.append(input('Digite um valor para a lista b: '))
for i in range(0,n,1):
    c.append(input('Digite um valor para a lista c: '))
# For each list print S/N for three checks in order: strictly increasing,
# strictly decreasing, and at least one pair of equal neighbours.
if crescente (a):
    print('S')
else:
    print('N')
if decrescente (a):
    print('S')
else:
    print('N')
if iguais (a):
    print('S')
else:
    print('N')
if crescente (b):
    print('S')
else:
    print('N')
if decrescente (b):
    print('S')
else:
    print('N')
if iguais (b):
    print('S')
else:
    print('N')
if crescente (c):
    print('S')
else:
    print('N')
if decrescente (c):
    print('S')
else:
    print('N')
if iguais (c):
    print('S')
else:
    print('N')
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
376003ae4440bb82d9c769811431381b61dbebbc | 61465dc9aded40737193de67edbbac89b98b2e83 | /leadmanager/accounts/api.py | 0a668ef7aa1a5a35e75aa82c97d3c58197b43a9b | [] | no_license | NganHaVan/PythonPractice | e0000f62f5eebc70695af447babc1e638ef874a9 | 3a7e376ef691458e720ec9bdef1d41b0b1fcc400 | refs/heads/master | 2023-01-08T22:15:37.082747 | 2019-06-05T09:36:51 | 2019-06-05T09:36:51 | 188,797,453 | 0 | 0 | null | 2023-01-07T05:49:32 | 2019-05-27T07:54:14 | Python | UTF-8 | Python | false | false | 1,401 | py | from knox.models import AuthToken
from rest_framework import generics, permissions
from rest_framework.decorators import permission_classes
from rest_framework.response import Response
from .serializers import LoginSerializer, RegisterSerializer, UserSerializer
# Register API
class RegisterAPI(generics.GenericAPIView):
    """Create a new account; responds with the user and a fresh knox token."""
    serializer_class = RegisterSerializer

    def post(self, request, *args, **kwargs):
        registration = self.get_serializer(data=request.data)
        # Invalid input short-circuits with a 400 response.
        registration.is_valid(raise_exception=True)
        created_user = registration.save()
        payload = {
            "user": UserSerializer(created_user, context=self.get_serializer_context()).data,
            "token": AuthToken.objects.create(created_user)[1],
        }
        return Response(payload)
# Login API
class LoginAPI(generics.GenericAPIView):
    """Validate credentials; responds with the user and a fresh knox token."""
    serializer_class = LoginSerializer

    def post(self, request, *args, **kwargs):
        credentials = self.get_serializer(data=request.data)
        # Invalid credentials short-circuit with a 400 response.
        credentials.is_valid(raise_exception=True)
        # The login serializer yields the authenticated user object itself.
        logged_in_user = credentials.validated_data
        payload = {
            "user": UserSerializer(logged_in_user, context=self.get_serializer_context()).data,
            "token": AuthToken.objects.create(logged_in_user)[1],
        }
        return Response(payload)
# Get UserAPI
class UserAPI(generics.RetrieveAPIView):
    """Retrieve endpoint that serializes the currently authenticated user."""
    # Only authenticated requests may reach this view.
    permission_classes = [permissions.IsAuthenticated]
    serializer_class = UserSerializer

    # NOTE: Authorization should be 'Token + token code'
    def get_object(self):
        # Instead of the usual pk lookup, the object is always the user
        # attached to the incoming request.
        requesting_user = self.request.user
        return requesting_user
| [
"vannunganha@gmail.com"
] | vannunganha@gmail.com |
6cd3405367f78e286064a46db46b69d8c5f63689 | 900bd035cc429c05307cc6a757cb3482c9074daf | /venv/Scripts/pip3-script.py | 58e93e211506308ae97457d9bbaf4c500f759828 | [] | no_license | maxshtun999/PythonBot1 | 2eff02138b0fced9b8ad6e46608c449c7be83246 | 19f81538f51fe5d0bef7e733cb16938bf949c0da | refs/heads/master | 2022-10-13T10:15:53.866144 | 2020-01-06T14:48:51 | 2020-01-06T14:48:51 | 232,130,241 | 0 | 1 | null | 2022-10-02T05:43:22 | 2020-01-06T15:32:37 | Python | UTF-8 | Python | false | false | 421 | py | #!"C:\Users\Shtun Max\PycharmProjects\PythonBot\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
# Wrapper generated when pip was installed into this virtualenv (see the
# EASY-INSTALL-ENTRY-SCRIPT marker above); it resolves and runs the 'pip3'
# console-script entry point. Not meant to be edited by hand.
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py' / '.exe' wrapper suffix so pip sees a clean argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
    )
| [
"maxshtun99@gmail.com"
] | maxshtun99@gmail.com |
5a5afc24b7232e7b0dfad88d7345ed4ac5c2c416 | ee27158cc32b364baa2693bee24c407886bd4d65 | /ecourses/migrations/0004_alter_student_universidad.py | 19677460db20b5a7ab125d35d9215cfb31320305 | [] | no_license | BenjaLepe/django-ecourses | e87cf3231de92509f7e012efced695901a93dd46 | caee50380149df71d114e27c6a2bdba7539b4222 | refs/heads/main | 2023-08-24T18:25:19.750885 | 2021-10-25T04:32:07 | 2021-10-25T04:32:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,797 | py | # Generated by Django 3.2.8 on 2021-10-25 02:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ecourses', '0003_course_promedio'),
]
operations = [
migrations.AlterField(
model_name='student',
name='universidad',
field=models.IntegerField(choices=[(0, 'Universidad De Chile'), (1, 'Universidad De Santiago De Chile'), (2, 'Universidad De Valparaíso'), (3, 'Universidad De Antofagasta'), (4, 'Universidad De La Serena'), (5, 'Universidad Del Bío-bío'), (6, 'Universidad De La Frontera'), (7, 'Universidad De Magallanes'), (8, 'Universidad De Talca'), (9, 'Universidad De Atacama'), (10, 'Universidad De Tarapacá'), (11, 'Universidad Arturo Prat'), (12, 'Universidad Metropolitana De Ciencias De La Educación'), (13, 'Universidad De Playa Ancha De Ciencias De La Educación'), (14, 'Universidad De Los Lagos'), (15, 'Universidad Tecnológica Metropolitana'), (16, "Universidad De O'higgins"), (17, 'Universidad De Aysén'), (18, 'Pontificia Universidad Católica De Chile'), (19, 'Universidad De Concepción'), (20, 'Universidad Técnica Federico Santa María'), (21, 'Pontificia Universidad Católica De Valparaíso'), (22, 'Universidad Austral De Chile'), (23, 'Universidad Católica Del Norte'), (24, 'Universidad Católica Del Maule'), (25, 'Universidad Católica De La Santísima Concepción'), (26, 'Universidad Católica De Temuco'), (27, 'Universidad Gabriela Mistral'), (28, 'Universidad Finis Terrae'), (29, 'Universidad Diego Portales'), (30, 'Universidad Central De Chile'), (31, 'Universidad Bolivariana'), (32, 'Universidad Pedro De Valdivia'), (33, 'Universidad Mayor'), (34, 'Universidad Academia De Humanismo Cristiano'), (35, 'Universidad Santo Tomás'), (36, 'Universidad La República'), (37, 'Universidad Sek'), (38, 'Universidad De Las Américas'), (39, 'Universidad Andrés Bello'), (40, 'Universidad De Viña Del Mar'), (41, 'Universidad Adolfo Ibañez'), (42, 'Universidad De Artes, Ciencias Y Comunicación - Uniacc'), (43, 'Universidad Autónoma De Chile'), (44, 'Universidad De Los Andes'), (45, 'Universidad Adventista De Chile'), (46, 'Universidad San Sebastián'), (47, 'Universidad Católica Cardenal Raúl Silva Henríquez'), (48, 'Universidad Del Desarrollo'), (49, 'Universidad De Aconcagua'), (50, 
'Universidad Los Leones'), (51, "Universidad Bernardo O'higgins"), (52, 'Universidad Tecnológica De Chile Inacap'), (53, 'Universidad Miguel De Cervantes'), (54, 'Universidad Alberto Hurtado'), (55, 'Instituto Profesional Agrario Adolfo Matthei'), (56, 'Instituto Profesional Inacap O Instituto Profesional Instituto Nacional De Capacitación Profesional Inacap'), (57, 'Instituto Profesional Libertador De Los Andes'), (58, 'Instituto Profesional Instituto De Estudios Bancarios Guillermo Subercaseaux'), (59, 'Instituto Profesional Escuela De Contadores Auditores De Santiago'), (60, 'Instituto Profesional Providencia'), (61, 'Instituto Profesional Chileno Británico De Cultura'), (62, 'Instituto Profesional Duoc Uc'), (63, 'Instituto Profesional Ipg'), (64, 'Instituto Profesional Santo Tomás'), (65, 'Instituto Profesional Instituto Superior De Artes Y Ciencias De La Comunicación'), (66, 'Instituto Profesional Alemán Wilhelm Von Humboldt'), (67, 'Instituto Profesional Diego Portales'), (68, 'Instituto Profesional De Chile'), (69, 'Instituto Profesional Escuela Moderna De Música'), (70, 'Instituto Profesional Esucomex'), (71, 'Instituto Profesional Eatri Instituto Profesional'), (72, 'Instituto Profesional De Los Angeles'), (73, 'Instituto Profesional Dr. 
Virginio Gómez G.'), (74, 'Instituto Profesional De Ciencias Y Educación Helen Keller'), (75, 'Instituto Profesional Aiep'), (76, 'Instituto Profesional De Arte Y Comunicación Arcos'), (77, 'Instituto Profesional Latinoamericano De Comercio Exterior - Iplacex'), (78, 'Instituto Profesional Los Leones'), (79, 'Instituto Profesional Ciisa'), (80, 'Instituto Profesional Los Lagos'), (81, 'Instituto Profesional De Ciencias De La Computación Acuario Data'), (82, 'Instituto Profesional Del Comercio'), (83, 'Instituto Profesional Del Valle Central'), (84, 'Instituto Profesional Carlos Casanueva'), (85, 'Instituto Profesional Instituto Nacional Del Fútbol'), (86, 'Instituto Profesional Instituto Internacional De Artes Culinarias Y Servicios'), (87, 'Instituto Profesional Projazz'), (88, 'Instituto Profesional Escuela De Cine De Chile'), (89, 'Instituto Profesional De Artes Escénicas Karen Connolly'), (90, 'Instituto Profesional Escuela De Marina Mercante Piloto Pardo'), (91, 'Instituto Profesional Vertical Instituto Profesional'), (92, 'Centro De Formación Técnica Instituto Central De Capacitación Educacional Icce'), (93, 'Centro De Formación Técnica De Enac O Centro De Formación Técnica De Los Establecimientos Nacionales De Educación Cáritas-chile'), (94, 'Centro De Formación Técnica Centro De Enseñanza De Alta Costura Paulina Diard'), (95, 'Centro De Formación Técnica Centro Tecnológico Superior Infomed'), (96, 'Centro De Formación Técnica Instituto Superior Alemán De Comercio Insalco'), (97, 'Centro De Formación Técnica Juan Bohon'), (98, 'Centro De Formación Técnica Barros Arana'), (99, 'Centro De Formación Técnica Santo Tomás'), (100, 'Centro De Formación Técnica Massachusetts'), (101, 'Centro De Formación Técnica Los Lagos'), (102, 'Centro De Formación Técnica Estudio Profesor Valero'), (103, 'Centro De Formación Técnica Cenco'), (104, 'Centro De Formación Técnica Prodata'), (105, 'Centro De Formación Técnica Instituto Superior De Estudios Jurídicos Canon'), (106, 
'Centro De Formación Técnica Iprosec'), (107, 'Centro De Formación Técnica San Agustín De Talca'), (108, 'Centro De Formación Técnica Icel'), (109, 'Centro De Formación Técnica Alpes'), (110, 'Centro De Formación Técnica Instituto Técnológico De Chile - I.t.c.'), (111, 'Centro De Formación Técnica Educap'), (112, 'Centro De Formación Técnica Cámara De Comercio De Santiago'), (113, 'Centro De Formación Técnica Laplace O C.f.t. De Estudios Superiores Y Capacitación Profesional Laplace'), (114, 'Centro De Formación Técnica Inacap'), (115, 'Centro De Formación Técnica Del Medio Ambiente'), (116, 'Centro De Formación Técnica Lota-arauco'), (117, 'Centro De Formación Técnica Ceduc - Ucn'), (118, 'Centro De Formación Técnica Ceitec'), (119, 'Centro De Formación Técnica Proandes'), (120, 'Centro De Formación Técnica Accioma'), (121, 'Centro De Formación Técnica De Tarapacá'), (122, 'Centro De Formación Técnica Protec'), (123, 'Centro De Formación Técnica De La Industria Gráfica O Cft. Ingraf'), (124, 'Centro De Formación Técnica De La Pontificia Universidad Católica De Valparaíso O Cft Pucv'), (125, 'Centro De Formación Técnica Teodoro Wickel Kluwen'), (126, 'Centro De Formación Técnica Profasoc'), (127, 'Centro De Formación Técnica Manpower'), (128, 'Centro De Formación Técnica Escuela Culinaria Francesa'), (129, 'Centro De Formación Técnica De La Región Del Maule'), (130, 'Centro De Formación Técnica De La Región De La Araucanía'), (131, 'Centro De Formación Técnica De La Región De Tarapacá'), (132, 'Centro De Formación Técnica De La Región De Coquimbo'), (133, 'Centro De Formación Técnica De La Región De Los Lagos'), (134, 'Centro De Formación Técnica De La Región De Valparaíso'), (135, 'Centro De Formación Técnica De La Región De Los Ríos'), (136, 'Centro De Formación Técnica De La Región De Antofagasta'), (137, 'Centro De Formación Técnica De La Región Metropolitana'), (138, 'Centro De Formación Técnica De La Región De Magallanes Y La Antártica Chilena')], default=1, 
max_length=100),
),
]
| [
"balepe@uc.cl"
] | balepe@uc.cl |
c470665fd971ef55fbcbf2c680c5254eb0e69e51 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/playground/memre/armv7l/obsolete/corp2/system/base/man-pages/actions.py | e17573a3dc5e34c142d651a5d3274ff1b0d7e803 | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,461 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2005-2010 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import crosstools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def install():
    """Install the man pages (plus the POSIX 2003 set) into the package root,
    then drop individual pages that other packages already ship."""
    crosstools.rawInstall("DESTDIR=%s" % get.installDIR())
    crosstools.rawInstall("DESTDIR=%s -C ../man-pages-posix-2003-a" % get.installDIR())
    # These come from attr
    for page in ("flistxattr", "removexattr", "fgetxattr", "fsetxattr",
                 "lsetxattr", "lremovexattr", "listxattr", "getxattr",
                 "setxattr", "llistxattr", "fremovexattr", "lgetxattr"):
        pisitools.remove("/usr/share/man/man2/%s.2" % page)
    # These come from libcap
    for page in ("capget", "capset"):
        pisitools.remove("/usr/share/man/man2/%s.2" % page)
    # Comes from xorg-input
    pisitools.remove("/usr/share/man/man4/mouse.4")
    pisitools.dodoc("man-pages-*.Announce", "README")
| [
"yusuf.aydemir@istanbul.com"
] | yusuf.aydemir@istanbul.com |
67fa6bad0b3fbe89d5c78e8cb9d16e3f6de865da | daef437c4cd7ed6ca4be3989f6c7b990ee1efcc7 | /djangogirls/asgi.py | 71b39e44a12e3493104e2d080c24e3277d71d1c8 | [] | no_license | YY-U/Myblog-web-app | b157624f5cd56b3b0b56d30f40fe5ae62973c78f | fd90497dc0aa7e6b3efb6e6d5a24c165fde6de33 | refs/heads/main | 2023-08-28T05:13:40.068384 | 2021-09-23T14:33:34 | 2021-09-23T14:33:34 | 401,692,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | """
ASGI config for djangogirls project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Must be set before get_asgi_application() imports the settings module.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangogirls.settings')
# Module-level ASGI callable that async servers import and serve.
application = get_asgi_application()
| [
"chxgw030@yahoo.co.jp"
] | chxgw030@yahoo.co.jp |
9adaee7940a1f0dbfd276202c2578faa1ef36ad7 | a97792ea5b48c1d0aacca906bc2d393b59ceb59a | /accounts/migrations/0001_initial.py | 3b079163e2466e9166ba44051a16589ed2ecc286 | [] | no_license | ebrahimasifismail/custom_login_view_django | 0de9e66846d19101506232e67c2a32777f70bdff | c1d8b3aa712b601f2c9f0e8cbeaf9080d329f00d | refs/heads/main | 2023-07-25T17:52:11.821384 | 2021-09-09T14:36:56 | 2021-09-09T14:36:56 | 404,233,445 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 862 | py | # Generated by Django 3.2.7 on 2021-09-04 15:14
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the custom MyUser model.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='MyUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                # Role discriminator for the account; defaults to patient ('PTNT').
                ('user_type', models.CharField(choices=[('PTNT', 'Patient'), ('DOCT', 'Doctor'), ('NURS', 'Nurse'), ('ADMN', 'Admin')], default='PTNT', max_length=4)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| [
"ebrahimasifismail@gmail.com"
] | ebrahimasifismail@gmail.com |
5223e23b482a1bfdb8243afce164d650eb01a992 | 6d1af2b4930d6454ad215a645a6ee2e8900ec45d | /tests/context.py | d05a6b307936e3dc2d9c135d32f2382d7a94e390 | [
"MIT"
] | permissive | orange-erotic-bible/orange-erotic-bible | 7cc8c477118dea8e20e03883029b79cd57e788a9 | cd9e31f44ccdf4b69782c5a21af25407cfe5418b | refs/heads/master | 2023-01-13T04:17:57.275994 | 2020-04-05T12:09:10 | 2020-04-05T12:09:10 | 225,234,684 | 49 | 2 | MIT | 2022-12-26T21:00:58 | 2019-12-01T21:38:03 | Python | UTF-8 | Python | false | false | 142 | py | import os
import sys
# Prepend the project root (one level up from tests/) to sys.path so the
# package can be imported without being installed.
module_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, module_path)
# Imported here so test modules can pull the package from this context module.
import oeb
| [
"57139857+orange-erotic-bible@users.noreply.github.com"
] | 57139857+orange-erotic-bible@users.noreply.github.com |
5bd0502dc889f757281d2e3246e31016f6681c03 | cf9f56f50d947cfb9490a9b3a25aec1e7506066f | /lpthw/ex20.py | 92462c8b1a9c2a23781fcd60cbdb6fff519aa234 | [] | no_license | Nipuncp/lyceaum | ce14758ae4834031e47df731de64a52b965f32db | 4ba26b89915786656bd93e1e62aa3002b59aab2b | refs/heads/master | 2020-03-26T08:24:19.309122 | 2018-08-14T09:28:00 | 2018-08-14T09:28:00 | 144,699,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 612 | py | from sys import argv
# Unpack CLI args: argv[0] is this script's name, argv[1] the file to read.
script, input_file = argv
def print_all(f):
    """Print the entire remaining contents of *f* to stdout."""
    contents = f.read()
    print(contents)
def rewind(f):
    """Reset *f*'s read position to the start of the file."""
    f.seek(0)
def print_a_line(line_count, f):
    """Print *line_count* followed by the next line read from *f*."""
    line_text = f.readline()
    print(line_count, line_text)
# NOTE(review): current_file is never closed; acceptable for this exercise script.
current_file = open(input_file)
# Pass 1: dump the whole file.
print("First let's print the whole file: \n")
print_all(current_file)
# The cursor is now at EOF; rewind before reading line by line.
print("Now let's rewind. Kind of like a tape.")
rewind(current_file)
print("Let's print three lines:")
current_line = 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
| [
"nipuncp123@gmail.com"
] | nipuncp123@gmail.com |
e09fa967d92023bca99cf54837fa105b810f030b | 4cb1fd4f6f4a8e57ca3d222007c1a5cde3deebfe | /Chapter04/pingmesh.py | 8fb17a404fcdee3635ee36367d652fcfefc59daf | [
"MIT"
] | permissive | PacktPublishing/Python-Network-Programming | 838097e82762fed54fbc2abffe4db71d5acd350b | 416cea10fe56b2312cf25150ed7ba27837cddf18 | refs/heads/master | 2023-02-11T00:34:43.611524 | 2023-01-30T08:58:32 | 2023-01-30T08:58:32 | 166,771,428 | 52 | 51 | null | null | null | null | UTF-8 | Python | false | false | 2,532 | py | import getmeshvalue
from getmeshvalue import getallvalues
# Lookup tables for the mesh: device info, IP -> display name, the list of
# devices to ping, and the latest results keyed by "src:dst" strings.
getdevinformation={}
devicenamemapping={}
arraydeviceglobal=[]
pingmeshvalues={}
# Devices that participate in the ping mesh; '4.2.2.2' is a public address
# displayed under the name "Random".
arraydeviceglobal=["192.168.255.240","192.168.255.245","192.168.255.248","192.168.255.249","4.2.2.2"]
devicenamemapping['192.168.255.240']="R1"
devicenamemapping['192.168.255.245']="R2"
devicenamemapping['192.168.255.248']="R3"
devicenamemapping['192.168.255.249']="R4"
devicenamemapping['4.2.2.2']="Random"
def getmeshvalues():
    """Refresh the global ping-mesh results for the current device list."""
    global arraydeviceglobal
    global pingmeshvalues
    # De-duplicate the device list and fix a deterministic (sorted) order.
    arraydeviceglobal = sorted(set(arraydeviceglobal))
    mesh_results = getallvalues(arraydeviceglobal)
    pingmeshvalues = dict(mesh_results)
# Populate the ping matrix once at startup.
getmeshvalues()
def createhtml():
    """Render the ping matrix as a static, self-refreshing HTML dashboard."""
    global arraydeviceglobal
    fopen=open("C:\pingmesh\pingmesh.html","w") ### this needs to be changed as web path of the html location
    # Page shell: auto-refresh every 60s, plus a small script that stamps the
    # page's last-modified time into the '#modified' element on load.
    head="""<html><head><meta http-equiv="refresh" content="60" ></head>"""
    head=head+"""<script type="text/javascript">
function updatetime() {
var x = new Date(document.lastModified);
document.getElementById("modified").innerHTML = "Last Modified: "+x+" ";
}
</script>"""+"<body onLoad='updatetime();'>"
    head=head+"<div style='display: inline-block;float: right;font-size: 80%'><h4><h4><p id='modified'></p></div>"
    head=head+"<div style='display: inline-block;float: left;font-size: 90%'></h4><center><h2>Network Health Dashboard<h2></div>"
    head=head+"<br><div><table border='1' align='center'><caption><b>Ping Matrix</b></caption>"
    head=head+"<center><br><br><br><br><br><br><br><br>"
    fopen.write(head)
    # NOTE(review): dval appears unused.
    dval=""
    # Header row: one column per device, labelled with its friendly name.
    fopen.write("<tr><td>Devices</td>")
    for fromdevice in arraydeviceglobal:
        fopen.write("<td><b>"+devicenamemapping[fromdevice]+"</b></td>")
    fopen.write("</tr>")
    # One row per source device; each cell shows the src->dst result.
    for fromdevice in arraydeviceglobal:
        fopen.write("<tr>")
        fopen.write("<td><b>"+devicenamemapping[fromdevice]+"</b></td>")
        for todevice in arraydeviceglobal:
            askvalue=fromdevice+":"+todevice
            if (askvalue in pingmeshvalues):
                # NOTE(review): this local shadows the imported getallvalues().
                getallvalues=pingmeshvalues.get(askvalue)
                # Green cell on success, red-ish when the stored value is "False".
                bgcolor='lime'
                if (getallvalues == "False"):
                    bgcolor='salmon'
                fopen.write("<td align='center' font size='2' height='2' width='2' bgcolor='"+bgcolor+"'title='"+askvalue+"'>"+"<font color='white'><b>"+getallvalues+"</b></font></td>")
        fopen.write("</tr>\n")
    fopen.write("</table></div>")
    fopen.close()
# Generate the dashboard once and report completion.
createhtml()
print("All done!!!!")
| [
"rutujay@packt.com"
] | rutujay@packt.com |
678b524eb856b015ab6f89278db01e09ee25aeee | c1dbc149888d9e17023c7efea77a1bcba21b5215 | /haarcasc.py | 96dc7a5b740f24b72236d3326c2c232c775b065a | [] | no_license | aneri2807/Opencv_scripts | 8361c0983425cae75dd546ddde6b5d35790564e5 | c842284be5208d83fd78d8576b8b7cbfd7c5c461 | refs/heads/master | 2021-04-12T11:14:15.069227 | 2018-03-22T15:22:14 | 2018-03-22T15:22:14 | 126,353,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 984 | py | import cv2
import numpy as np
# Pre-trained Haar cascade models for frontal faces and eyes; the XML files
# are loaded from the working directory.
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')
# Capture frames from the default webcam (device 0).
cap = cv2.VideoCapture(0)
while True:
    ret, img = cap.read()
    # NOTE(review): ret is not checked; cvtColor will fail if the grab fails.
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Detect faces (scale factor 1.3, minimum 5 neighbours per candidate).
    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
    for (x,y, w, h) in faces:
        # Blue (BGR 255,0,0) box around each detected face.
        cv2.rectangle(img, (x,y), (x+w, y+h), (255,0,0), 2)
        roi_gray = gray[y:y+h, x:x+w] #region of image y,x
        roi_color = img[y:y+h, x:x+w]
        # Look for eyes only inside the detected face region.
        eyes = eye_cascade.detectMultiScale(roi_gray)
        for (ex,ey,ew,eh) in eyes:
            # Green box around each eye, drawn on the colour ROI view.
            cv2.rectangle(roi_color, (ex, ey), (ex+ew, ey+eh), (0,255,0), 2)
##            font = cv2.FONT_HERSHEY_SIMPLEX
##            cv2.putText(roi_color,'EYE',(ex-ew,ey-eh), font, 0.5, (11,255,255), 2, cv2.LINE_AA)
    cv2.imshow('img',img)
    # Exit when ESC (key code 27) is pressed.
    k = cv2.waitKey(30) & 0xff
    if k == 27:
        break
cap.release()
cv2.destroyAllWindows()
| [
"aneri2807@gmail.com"
] | aneri2807@gmail.com |
b70f5aaaa9a23b47d5833e61e54bf71c2eab1650 | cf6fc33bb2a3790ddb5dacfc9246e3c42f4c3812 | /firstTier.py | cf4b3aa531961466b8a9ab9aa1df3f5bd164a881 | [] | no_license | oliverwangyi/Software-Supply-chain-Risk-Analysis- | 6965dc936c8d6a85f593285463cf18487477c816 | 166e5b309c8820a3cf5a64c2544c354f58fc853c | refs/heads/master | 2020-04-01T02:40:24.963984 | 2018-10-12T18:01:37 | 2018-10-12T18:01:37 | 152,789,676 | 0 | 0 | null | 2018-10-12T18:03:42 | 2018-10-12T18:03:41 | null | UTF-8 | Python | false | false | 2,659 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 24 17:21:09 2018
@author: krishna
"""
import glob2
#Creating a list of path for each python file
# Collect every .py file under the project tree (recursive glob).
pyFilesPath = glob2.glob('/home/krishna/Desktop/fall 2018-19/Research/ipythonProject/**/*.py')
# Reading the libraries from each of the python files
firstTierList=[]
# NOTE(review): presumably excluded because it is not plain ASCII/UTF-8
# (the file name suggests so) -- confirm.
pyFilesPath.remove("/home/krishna/Desktop/fall 2018-19/Research/ipythonProject/ipython/IPython/core/tests/nonascii.py")
for files in pyFilesPath:
    with open(files) as fs:
        for line in fs:
            if len(line)>=2:
                # The first whitespace-separated token decides whether this
                # line is an import statement.
                match = line.split(' ')[0]
                if match=="import" or match=="from":
                    libMatch = line.split(' ')[1]
                    # Manual de-duplication: only append names not seen before.
                    cnt=0
                    for item in firstTierList:
                        if item ==libMatch:
                            cnt=1
                    if cnt==0:
                        firstTierList.append(libMatch)
#Extract only packages from the list and exclude classes
checkChar='.'
finalList=[]
# Keep only the top-level package name (text before the first '.').
for item in firstTierList:
    flag=0
    for charecter in item:
        if charecter==checkChar:
            flag=1
            break
    if flag==0:
        finalList.append(item)
    else:
        wordSplit=item.split('.')[0]
        finalList.append(wordSplit)
finalList=list(set(finalList))
# Remove itself packages
# Drop relative imports, '%' magics, and IPython's own packages.
temp = []
itselfList = []
start = ('.','%','IPython', 'ipython')
for item in finalList:
    if item.startswith(start):
        itselfList.append(item)
    else:
        temp.append(item)
finalList=temp.copy()
# Strip a trailing newline or comma left over from the raw source line.
temp1=[]
for item in finalList:
    if item.endswith('\n') or item.endswith(','):
        item = item[:-1]
        temp1.append(item)
    else:
        temp1.append(item)
newfinalList = temp1.copy()
newfinalList = list(set(newfinalList))
# Remove standard libraries to get external libraries
standardList = []
externalList = []
externalList = newfinalList.copy()
# NOTE(review): pop(0) removes an arbitrary element because the list comes
# from an unordered set -- confirm this is intentional.
externalList.pop(0)
with open('standardLibraries.txt') as fs:
    standardList = fs.read().splitlines()
checkChar = '.'
finalStandardList = []
# Normalise the standard-library names to their top-level package as well.
for item in standardList:
    flag = 0
    for charecter in item:
        if charecter == checkChar:
            flag = 1
            break
    if flag == 0:
        finalStandardList.append(item)
    else:
        wordSplit = item.split('.')[0]
        finalStandardList.append(wordSplit)
finalStandardList = list(set(finalStandardList))
# Whatever is not a standard-library package is an external dependency.
finalExternalList=[item for item in externalList if item not in finalStandardList]
| [
"noreply@github.com"
] | oliverwangyi.noreply@github.com |
ed804e3a0fd86fac05c8659b567fe28785d81b74 | 23004e07eb5923b69063d1a303707245ba185610 | /ADM_Project.py | 0aa88edf533bd86403a217a8cfb554678585e06d | [] | no_license | Vasireddydivya/Santander-Product-Recommendation_DataPrep_Python | 0bba05cbbab745b41bbdb7a0f15482047b0b0f26 | 0bf586237ccd73d796f6ec14e330773e0bdce256 | refs/heads/master | 2021-01-25T09:20:18.714274 | 2017-11-15T02:48:12 | 2017-11-15T02:48:12 | 93,812,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,076 | py | import numpy as np
import pandas as pd
#from tsne import bh_sne
import seaborn as sns
import matplotlib.pyplot as plt
#%matplotlib inline
#pylab.rcParams['figure.figsize'] = (10, 6)
def convert_int(dataframe, colname):
    """Convert column *colname* of *dataframe* to a numeric dtype in place.

    Values that cannot be parsed as numbers become NaN
    (pandas ``errors='coerce'`` behaviour).
    """
    coerced = pd.to_numeric(dataframe[colname], errors='coerce')
    dataframe[colname] = coerced
# --- Load a bounded sample of the Santander product-recommendation data ---
limit_rows=1000000
df=pd.read_csv("C:/Users/vasir/Desktop/ADM/train_ver2.CSV",nrows=limit_rows)
df.head()
# Down-sample to ~520 distinct customers (ncodpers ids) to speed exploration.
unique_ids = pd.Series(df["ncodpers"].unique())
limit_people = 5.2e2
# NOTE(review): limit_people is a float (520.0); Series.sample(n=...) expects an
# int and newer pandas versions reject a float here -- confirm pandas version.
unique_id = unique_ids.sample(n=limit_people)
df = df[df.ncodpers.isin(unique_id)]
# NOTE: print-statement syntax throughout => this script targets Python 2.
print df.describe()
#print df.dtypes
print df['fecha_dato'].dtype
#converting the fecha_dato and fecha_alta to DateTime
df["fecha_dato"]=pd.to_datetime(df["fecha_dato"],format="%Y-%m-%d")
df["fecha_alta"]=pd.to_datetime(df["fecha_alta"],format="%Y-%m-%d")
print df['fecha_dato'].unique()
#
##age is string value in data so converting the age to numeric value (bad values -> NaN)
convert_int(df,'age')
print df.dtypes
# Month number of the snapshot date.
df["month"] = pd.DatetimeIndex(df["fecha_dato"]).month
#
##checking for null values
print df.isnull().any()
#Data Cleaning: visualise the raw age distribution first
with plt.rc_context(dict(sns.axes_style("whitegrid"),
                         **sns.plotting_context("notebook",font_scale=1.5))):
    sns.distplot(df['age'].dropna(),bins=100,kde=False)
    sns.plt.title("Age Distribution")
    plt.ylabel("Count")
#I found few outliers below 20 and above 100, so we need to uniformly distribute the graph and replace the NA's with mean or median
#trying with median: clamp outliers into the nearest plausible age band, then fill NaNs
df.loc[df.age<18,"age"] = df.loc[(df.age>=18) & (df.age<=30),"age"].median(skipna=True)
df.loc[df.age>100,"age"] = df.loc[(df.age>=30) & (df.age<=100),"age"].median(skipna=True)
df["age"].fillna(df["age"].median(),inplace=True)
df["age"] = df["age"].astype(int)
# Same clamping repeated with the mean; after the median pass above, ages were
# already clamped/filled, so this pass mostly finds nothing new.
df.loc[df.age < 18,"age"] = df.loc[(df.age >= 18) & (df.age <= 30),"age"].mean(skipna=True)
df.loc[df.age > 100,"age"] = df.loc[(df.age >= 30) & (df.age <= 100),"age"].mean(skipna=True)
df["age"].fillna(df["age"].mean(),inplace=True)
df["age"] = df["age"].astype(int)
# Re-plot to verify the cleaned distribution.
with plt.rc_context(dict(sns.axes_style("whitegrid"),
                         **sns.plotting_context("notebook",font_scale=1.5))):
    sns.distplot(df['age'].dropna(),bins=80,kde=False)
    sns.plt.title("Age Distribution")
    plt.ylabel("Count")
#Both median and mean plots are almost similar i will use median value because we will not have bias issues.
print df.isnull().any()
#customer seniority and and new customer are correlated because customer seriority is in months and new customer will be '1' if he registered in past 6 months
#check the NA count for both columns
df['ind_nuevo'].isnull().sum()
#convert the customer seniority to int
convert_int(df,'antiguedad')
print df['antiguedad'].dtype
def find_nullVal_Count(dataframe, colname):
    """Return how many entries of column *colname* in *dataframe* are null."""
    null_mask = dataframe[colname].isnull()
    return null_mask.sum()
# --- ind_nuevo (new-customer flag): inspect then fill missing values ---
val=find_nullVal_Count(df,'ind_nuevo')
print val
df['ind_nuevo'].isnull().sum()
# Months of history per customer among rows with missing ind_nuevo.
months_active=df.loc[df["ind_nuevo"].isnull(),:].groupby("ncodpers",sort=False).size()
months_active.max()
# Treat customers with a missing flag as new (1).
df.loc[df['ind_nuevo'].isnull(),"ind_nuevo"]=1
print df.isnull().any()
# --- antiguedad (seniority in months): fill missing with the minimum, clamp negatives to 0 ---
df['antiguedad'].isnull().sum()
print df['antiguedad'].dtype
print df.loc[df['antiguedad'].isnull(),"ind_nuevo"].describe()
df.loc[df['antiguedad'].isnull(),"antiguedad"]=df.antiguedad.min()
df.loc[df['antiguedad']<0,"antiguedad"]=0
print df.isnull().any()
#indrel: inspect the value distribution, then fill missing with 1
df['indrel'].isnull().sum()
pd.Series([i for i in df.indrel]).value_counts()
df.loc[df['indrel'].isnull(),"indrel"]=1
print df.isnull().any()
#tipodom,customer's province code: drop both columns entirely
df.drop(['tipodom','cod_prov'],axis=1,inplace=True)
print df.isnull().any()
#fecha_alta: fill missing join dates with the median join date
dates=df.loc[:,'fecha_alta'].sort_values().reset_index()
date_value=int(np.median(dates.index.values))
print date_value
df.loc[df['fecha_alta'].isnull(),"fecha_alta"]=dates.loc[date_value,"fecha_alta"]
df["fecha_alta"].describe()
print df.isnull().any()
# nomprov: repair the mis-encoded Coruna entry, fill missing with "UNKNOWN".
df['nomprov'].unique()
df.loc[df['nomprov']=='CORU\xc3\x91A, A',"nomprov"]="CORUNA, A"
df.loc[df['nomprov'].isnull(),"nomprov"]="UNKNOWN"
df['nomprov'].unique()
print df.isnull().any()
# Product-ownership flags: missing is treated as "product not held" (0).
df['ind_nom_pens_ult1'].isnull().sum()
df.loc[df['ind_nom_pens_ult1'].isnull(),"ind_nom_pens_ult1"]=0
df.loc[df['ind_nomina_ult1'].isnull(),"ind_nomina_ult1"]=0
print df.isnull().any()
# indfall (deceased flag): inspect distribution, fill missing with 'N'.
pd.Series([i for i in df.indfall]).value_counts()
df.loc[df['indfall'].isnull(),"indfall"]='N'
# tiprel_1mes (customer relation type): fill missing with 'A', cast to category.
pd.Series([i for i in df.tiprel_1mes]).value_counts()
df.loc[df['tiprel_1mes'].isnull(),"tiprel_1mes"]='A'
df.tiprel_1mes = df.tiprel_1mes.astype("category")
# indrel_1mes mixes ints, floats and strings for the same code; normalise every
# variant to a canonical string code ("1".."4", "P").
map_dict = { 1.0  : "1",
             "1.0" : "1",
             "1"   : "1",
             "3.0" : "3",
             "P"   : "P",
             3.0   : "3",
             2.0   : "2",
             "3"   : "3",
             "2.0" : "2",
             "4.0" : "4",
             "4"   : "4",
             "2"   : "2"}
df.indrel_1mes.fillna("P",inplace=True)
df.indrel_1mes = df.indrel_1mes.apply(lambda x: map_dict.get(x,x))
df.indrel_1mes = df.indrel_1mes.astype("category")
print df.isnull().any()
# Identity/categorical columns: tag missing entries as "UNKNOWN".
pd.Series([i for i in df.canal_entrada]).value_counts()
missing_col=['ind_empleado','pais_residencia','sexo','canal_entrada','conyuemp']
for col in missing_col:
    df.loc[df[col].isnull(),col]='UNKNOWN'
print df.isnull().any()
# indext / indresi: inspect distributions, then fill with 'N' / 'S'.
pd.Series([i for i in df.indext]).value_counts()
df.loc[df['indext'].isnull(),"indext"]='N'
print df.isnull().any()
pd.Series([i for i in df.indresi]).value_counts()
df.loc[df['indresi'].isnull(),"indresi"]='S'
print df.isnull().any()
pd.Series([i for i in df.ult_fec_cli_1t]).value_counts()
pd.Series([i for i in df.ind_actividad_cliente]).value_counts()
print df['ult_fec_cli_1t'].isnull().sum()
# Activity flag: fill missing with the column median.
df.loc[df.ind_actividad_cliente.isnull(),"ind_actividad_cliente"] = df["ind_actividad_cliente"].median()
print df['ult_fec_cli_1t'].dtype
#Data Visualizations
import numpy as np
import pandas as pd
#from tsne import bh_sne
import seaborn as sns
import matplotlib.pyplot as plt
from bokeh.charts import Histogram,Bar
from bokeh.io import gridplot, output_file, show
from bokeh.plotting import figure
from bokeh.layouts import row
#%matplotlib inline
#pylab.rcParams['figure.figsize'] = (10, 6)
def convert_int(dataframe, colname):
    """In-place numeric coercion of *colname*; unparseable values become NaN."""
    dataframe[colname] = pd.to_numeric(dataframe[colname], errors='coerce')
# --- Standalone visualisation pass: reload the data and build bokeh charts ---
limit_rows=1000000
df=pd.read_csv("C:/Users/vasir/Desktop/ADM/train_ver2.CSV",nrows=limit_rows)
convert_int(df,'age')
print df['age'].dtype
# Sentinel -1 for unknown ages so the column can be cast to int64 below.
df['age']=df['age'].fillna(-1);
cols=['age']
df[cols]=df[cols].applymap(np.int64)
# Chart on a 1% random sample to keep the plots responsive.
df_frac=df.sample(frac=0.01)
p_age=Histogram(df_frac,values='age',title="Age Distribution")
#show(p_age)
dffrac1=df_frac.dropna(subset=['sexo'],how='any')
dffrac1.head()
#dffrac1['sexo']=dffrac1['sexo'].astype('category')
p=Bar(dffrac1,'sexo',title="Sex")
#show(p)
# Mean household income (renta) per province.
dffrac2=df_frac.dropna(subset=['renta'],how='any')
bar_renta=Bar(dffrac2,values='renta',label='nomprov',agg='mean',title="City Vs Renta",legend=False, plot_width=800)
#show(bar_renta)
# Columns matching ind_*ult* are the product-ownership flag columns.
features_columns=df.filter(regex='ind_+.*ult.*');
features=features_columns.columns.values;
#print features;
df1=df[features]
feature=features.tolist();
print feature;
# One grouped bar chart (age by sex) per product-ownership flag.
df_na=df_frac.dropna(subset=['ind_nom_pens_ult1','ind_nomina_ult1'],how='any')
df_bar=Bar(df_na,label='ind_nom_pens_ult1',values='age',group='sexo',title="Sex vs ind_nom_pens_ult1", plot_width=200)
df_bar1=Bar(df_na,label='ind_nomina_ult1',values='age',group='sexo',title="Sex vs ind_nomina_ult1",legend=False, plot_width=200)
df_bar2=Bar(df_na,label='ind_ahor_fin_ult1',values='age',group='sexo',title="Sex vs ind_ahor_fin_ult1",legend=False, plot_width=200)
df_bar3=Bar(df_na,label='ind_aval_fin_ult1',values='age',group='sexo',title="Sex vs ind_aval_fin_ult1",legend=False, plot_width=200)
df_bar4=Bar(df_na,label='ind_cco_fin_ult1',values='age',group='sexo',title="Sex vs ind_cco_fin_ult1",legend=False, plot_width=200)
df_bar5=Bar(df_na,label='ind_cder_fin_ult1',values='age',group='sexo',title="Sex vs ind_cder_fin_ult1",legend=False, plot_width=200)
df_bar6=Bar(df_na,label='ind_cno_fin_ult1',values='age',group='sexo',title="Sex vs ind_cno_fin_ult1",legend=False, plot_width=200)
df_bar7=Bar(df_na,label='ind_ctju_fin_ult1',values='age',group='sexo',title="Sex vs ind_ctju_fin_ult1",legend=False, plot_width=200)
df_bar8=Bar(df_na,label='ind_ctma_fin_ult1',values='age',group='sexo',title="Sex vs ind_ctma_fin_ult1",legend=False, plot_width=200)
df_bar9=Bar(df_na,label='ind_ctop_fin_ult1',values='age',group='sexo',title="Sex vs ind_ctop_fin_ult1",legend=False, plot_width=200)
df_bar10=Bar(df_na,label='ind_ctpp_fin_ult1',values='age',group='sexo',title="Sex vs ind_ctpp_fin_ult1",legend=False, plot_width=200)
df_bar11=Bar(df_na,label='ind_deco_fin_ult1',values='age',group='sexo',title="Sex vs ind_deco_fin_ult1",legend=False, plot_width=200)
df_bar12=Bar(df_na,label='ind_deme_fin_ult1',values='age',group='sexo',title="Sex vs ind_deme_fin_ult1",legend=False, plot_width=200)
df_bar13=Bar(df_na,label='ind_dela_fin_ult1',values='age',group='sexo',title="Sex vs ind_dela_fin_ult1",legend=False, plot_width=200)
df_bar14=Bar(df_na,label='ind_ecue_fin_ult1',values='age',group='sexo',title="Sex vs ind_ecue_fin_ult1",legend=False, plot_width=200)
df_bar15=Bar(df_na,label='ind_fond_fin_ult1',values='age',group='sexo',title="Sex vs ind_fond_fin_ult1",legend=False, plot_width=200)
df_bar16=Bar(df_na,label='ind_hip_fin_ult1',values='age',group='sexo',title="Sex vs ind_hip_fin_ult1",legend=False, plot_width=200)
df_bar17=Bar(df_na,label='ind_plan_fin_ult1',values='age',group='sexo',title="Sex vs ind_plan_fin_ult1",legend=False, plot_width=200)
df_bar18=Bar(df_na,label='ind_pres_fin_ult1',values='age',group='sexo',title="Sex vs ind_pres_fin_ult1",legend=False, plot_width=200)
df_bar19=Bar(df_na,label='ind_reca_fin_ult1',values='age',group='sexo',title="Sex vs ind_reca_fin_ult1",legend=False, plot_width=200)
df_bar20=Bar(df_na,label='ind_tjcr_fin_ult1',values='age',group='sexo',title="Sex vs ind_tjcr_fin_ult1",legend=False, plot_width=200)
df_bar21=Bar(df_na,label='ind_valo_fin_ult1',values='age',group='sexo',title="Sex vs ind_valo_fin_ult1",legend=False, plot_width=200)
df_bar22=Bar(df_na,label='ind_viv_fin_ult1',values='age',group='sexo',title="Sex vs ind_viv_fin_ult1",legend=False, plot_width=200)
df_bar23=Bar(df_na,label='ind_recibo_ult1',values='age',group='sexo',title="Sex vs ind_recibo_ult1",legend=False, plot_width=200)
show(df_bar)
#fig=figure();
#fig.add_glyphs(df_bar.get_glyphs())
#fig.add_glyphs(bar_renta.get_glyphs())
#fig.add_glyphs(p_age.get_glyphs())
# Write a 3x3 grid of the first charts to an HTML report and open it.
output_file('visulize.html')
p=gridplot([df_bar,df_bar1,p_age],[df_bar2,df_bar3,df_bar4],[df_bar5,df_bar6,df_bar7])
show(p)
# Residual null counts on a few product flags (results printed in a REPL only).
df['ind_ahor_fin_ult1'].isnull().sum()
df['ind_nom_pens_ult1'].isnull().sum()
df['ind_nomina_ult1'].isnull().sum()
df['ind_recibo_ult1'].isnull().sum()
| [
"noreply@github.com"
] | Vasireddydivya.noreply@github.com |
c4d8e805c1f6b10bf7e2aa5d93ed4a594334fcc9 | a7fce57f807f35ae9c417f03c8a92a431362a0ce | /app/models/base.py | 588953c21c2f9b63e6bd7f1c170b0b0f6818e57d | [] | no_license | ONSdigital/census-rm-case-processor-prototype-python | 5e9cf1645b63736bde9487508c1009946c41f7be | dfeeeecbae94b8bbedcc85c12b56699488b04f48 | refs/heads/master | 2020-05-14T05:08:33.683697 | 2019-04-16T14:58:37 | 2019-04-16T14:58:37 | 181,700,008 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | from sqlalchemy.schema import MetaData
from sqlalchemy.ext.declarative import declarative_base
# All declarative models share this metadata, scoped to the
# `casesvcv2python` database schema.
metadata = MetaData(schema='casesvcv2python')
# Common declarative base class for the application's SQLAlchemy models.
Base = declarative_base(metadata=metadata)
| [
"neale.dj@gmail.com"
] | neale.dj@gmail.com |
dced842cb0138a15c1c028b64d73971e74c64f60 | 9d37286183243fd2a040466f0df0b3b31f69ef6a | /hunt/wsgi.py | b0853fef3d467722dedf7cf93cef1e236a011cd8 | [] | no_license | ma9shah/Product-Hunt-Django | 4a5b95db2d042718f374d3fd7cd771b4b0d196ed | 19bbfb72889ebce6813a0845df10787f8f7a93f6 | refs/heads/master | 2022-01-25T15:04:42.207144 | 2019-07-27T08:54:29 | 2019-07-27T08:54:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | """
WSGI config for hunt project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before the application is constructed.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hunt.settings')
# WSGI callable used by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"ma9shah@gmail.com"
] | ma9shah@gmail.com |
def get_number(num):
    """Return the number one less than the given positive number.

    If the number is nonpositive, return a string "Enter a positive number!".

    Arguments:
    num -- an integer.

    Return values:
    An integer one less than the input number.
    """
    return num - 1 if num > 0 else "Enter a positive number!"
| [
"olmos.rafa@gmail.com"
] | olmos.rafa@gmail.com |
a2de1e5017153780b682b865a1dba29d1ab68342 | b1e304b259d0cbca2cb86640237e24be7d6f870f | /plugin/database_prefab.py | 439e22d0057e56ec4f6e491b9225ad1f8eb52a31 | [] | no_license | toshihr/alignbench | 4b30edd42a7dec1a4c36c8fc93e8ec8b3521a3ec | 7000427e3b0b015856893ee43b69dedb63cdf4c3 | refs/heads/master | 2021-07-21T09:10:47.624153 | 2017-10-30T18:33:06 | 2017-10-30T18:33:06 | 108,892,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | py | # -*- coding: utf-8 -*-
# Register the four PREFAB benchmark database archives (prefab1..prefab4).
RESOURCE['databases'].update({
    'prefab%d' % version: {
        'arcName': 'prefab%d.tar.bz2' % version,
    }
    for version in (1, 2, 3, 4)
})
| [
"toshihr@toshihr-mbp.local"
] | toshihr@toshihr-mbp.local |
ca152810fc429ad3a3aa2281e6960067671ebd20 | 5f862a5f0116030adb4ce8d1f66c22e52eb5546f | /test/test_automl/test_smbo.py | 7094e9c51ac478e5b9391d662872db4ddc3f1610 | [
"BSD-3-Clause"
] | permissive | IsoLATionzhw/auto-sklearn | 9c1adbffe8f077471cbf9eb1c0a89d4ab9593220 | a263efb49f7b7f597963bc1e787105ea7615ea75 | refs/heads/master | 2021-07-15T05:47:23.268566 | 2017-10-04T10:08:21 | 2017-10-04T10:08:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,894 | py | import unittest
from autosklearn.smbo import AutoMLSMBO
from autosklearn.metrics import accuracy
from smac.facade.smac_facade import SMAC
from smac.scenario.scenario import Scenario
from smac.tae.execute_ta_run import StatusType
from ConfigSpace import ConfigurationSpace, UniformFloatHyperparameter, Configuration
class TestSMBO(unittest.TestCase):
    # Unit tests for auto-sklearn's AutoMLSMBO wrapper around SMAC.

    def test_choose_next(self):
        """choose_next must reject an empty runhistory and succeed once a run exists."""
        # Minimal two-hyperparameter continuous search space.
        configspace = ConfigurationSpace()
        configspace.add_hyperparameter(UniformFloatHyperparameter('a', 0, 1))
        configspace.add_hyperparameter(UniformFloatHyperparameter('b', 0, 1))

        dataset_name = 'foo'
        func_eval_time_limit = 15
        total_walltime_limit = 15
        memory_limit = 3072

        # backend/watcher are passed as None -- presumably choose_next does not
        # touch them on this code path; confirm against AutoMLSMBO internals.
        auto = AutoMLSMBO(
            config_space=None,
            dataset_name=dataset_name,
            backend=None,
            func_eval_time_limit=func_eval_time_limit,
            total_walltime_limit=total_walltime_limit,
            memory_limit=memory_limit,
            watcher=None,
            metric=accuracy
        )
        auto.config_space = configspace
        scenario = Scenario({
            'cs': configspace,
            'cutoff_time': func_eval_time_limit,
            'wallclock_limit': total_walltime_limit,
            'memory_limit': memory_limit,
            'run_obj': 'quality',
        })
        smac = SMAC(scenario)

        # With no recorded runs, choose_next must raise a ValueError.
        self.assertRaisesRegex(
            ValueError,
            'Cannot use SMBO algorithm on empty runhistory',
            auto.choose_next,
            smac
        )

        # Record one successful run so an incumbent and history exist.
        config = Configuration(configspace, values={'a': 0.1, 'b': 0.2})
        # TODO make sure the incumbent is always set?
        smac.solver.incumbent = config
        runhistory = smac.solver.runhistory
        runhistory.add(config=config, cost=0.5, time=0.5,
                       status=StatusType.SUCCESS)
        # Should now succeed without raising.
        auto.choose_next(smac)
| [
"feurerm@informatik.uni-freiburg.de"
] | feurerm@informatik.uni-freiburg.de |
def bmi(weight, height):
    """Print the body-mass index for *weight* (kg) and *height* (m),
    then return its category as a string.
    """
    index = weight / height ** 2
    print(index)
    if index <= 18.5:
        return "Underweight"
    if index <= 25:
        return "Normal"
    if index <= 30:
        return "Overweight"
    return "Obese"

print(bmi(90, 1.80))
"jason.mahony@autodesk.com"
] | jason.mahony@autodesk.com |
be0d2c0404f5a1d0904d36ae3e5636b9dbfb75f9 | 7fa176ce9ef29258ea3711612b0605ee82afd799 | /python/scikit-learn/svm/plot_dbscan.py | 778af1090ffaa7bbb5c36e76a8d5ecbfbdd491df | [] | no_license | fooyou/Exercise | c5137945821ee7f9f21a86f95c06d8e71941c19f | 1a5438d961f1716953b90921aa1ee9d60a97b23e | refs/heads/master | 2022-07-08T13:15:52.481707 | 2018-07-08T03:21:17 | 2018-07-08T03:21:17 | 37,168,041 | 1 | 1 | null | 2022-07-06T20:05:34 | 2015-06-10T01:18:11 | C++ | UTF-8 | Python | false | false | 2,478 | py | # -*- coding: utf-8 -*-
"""
===================================
Demo of DBSCAN clustering algorithm
===================================
Finds core samples of high density and expands clusters from them.
"""
print(__doc__)
import numpy as np
from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.datasets.samples_generator import make_blobs
from sklearn.preprocessing import StandardScaler
##############################################################################
# Generate sample data
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(n_samples=750, centers=centers, cluster_std=0.4,
random_state=0)
X = StandardScaler().fit_transform(X)
##############################################################################
# Compute DBSCAN
db = DBSCAN(eps=0.3, min_samples=10).fit(X)
core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
core_samples_mask[db.core_sample_indices_] = True
labels = db.labels_
# Number of clusters in labels, ignoring noise if present.
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)
print('Estimated number of clusters: %d' % n_clusters_)
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels_true, labels))
print("Completeness: %0.3f" % metrics.completeness_score(labels_true, labels))
print("V-measure: %0.3f" % metrics.v_measure_score(labels_true, labels))
print("Adjusted Rand Index: %0.3f"
% metrics.adjusted_rand_score(labels_true, labels))
print("Adjusted Mutual Information: %0.3f"
% metrics.adjusted_mutual_info_score(labels_true, labels))
print("Silhouette Coefficient: %0.3f"
% metrics.silhouette_score(X, labels))
##############################################################################
# Plot result
import matplotlib.pyplot as plt
# Black removed and is used for noise instead.
unique_labels = set(labels)
colors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))
for k, col in zip(unique_labels, colors):
if k == -1:
# Black used for noise.
col = 'k'
class_member_mask = (labels == k)
xy = X[class_member_mask & core_samples_mask]
plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
markeredgecolor='k', markersize=14)
xy = X[class_member_mask & ~core_samples_mask]
plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
markeredgecolor='k', markersize=6)
plt.title('Estimated number of clusters: %d' % n_clusters_)
plt.show() | [
"liuchaozhenyu@gmail.com"
] | liuchaozhenyu@gmail.com |
d5633a2b848b581a3a034619a61450208a8052e8 | da1d21bb8d0760bfba61cd5d9800400f928868aa | /apps/common/utils/iterables.py | 3d4d2470b42a38d43cc00ac6ac9d420b5e00c8f0 | [] | no_license | biznixcn/WR | 28e6a5d10f53a0bfe70abc3a081c0bf5a5457596 | 5650fbe59f8dfef836503b8092080f06dd214c2c | refs/heads/master | 2021-01-20T23:53:52.887225 | 2014-05-13T02:00:33 | 2014-05-13T02:00:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | # -*- coding: utf-8 -*-
from itertools import izip_longest
def grouper(n, iterable, padvalue=None):
    """grouper(3, 'abcdefg', 'x') --> ('a','b','c'), ('d','e','f'), ('g','x','x')

    Collect *iterable* into fixed-length chunks of *n*, padding the final
    chunk with *padvalue*. (Python 2: relies on itertools.izip_longest.)
    """
    chunk_iters = [iter(iterable)] * n
    return izip_longest(*chunk_iters, fillvalue=padvalue)
| [
"mbc@Mathiass-MacBook-Pro.local"
] | mbc@Mathiass-MacBook-Pro.local |
70f011987d0d83896b55fb107ebb146db7604a64 | 94ed2beea5ac8936f555824a6c799a4c5c810937 | /apps/aula01/cadastro_edital/migrations/0002_auto_20190313_1819.py | 02dc4db4fb0a40ca7b0654e0b90886d8feea2591 | [] | no_license | SarahRaq/handson_django | a4fc851022b4a20d314c149cb582aa6c60f95726 | 5277011ec8a25c3490089c4f7ba137bd5959ae9e | refs/heads/master | 2020-04-28T05:02:23.375150 | 2019-03-15T18:01:04 | 2019-03-15T18:01:04 | 175,004,387 | 0 | 0 | null | 2019-03-11T13:19:33 | 2019-03-11T13:19:33 | null | UTF-8 | Python | false | false | 1,494 | py | # Generated by Django 2.1.7 on 2019-03-13 18:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cadastro_edital', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='pagamento',
name='edital',
),
migrations.RemoveField(
model_name='usuario',
name='groups',
),
migrations.RemoveField(
model_name='usuario',
name='user_permissions',
),
migrations.RemoveField(
model_name='avaliador',
name='usuario',
),
migrations.RemoveField(
model_name='coordenador',
name='usuario',
),
migrations.AddField(
model_name='avaliador',
name='nome',
field=models.CharField(blank=True, max_length=200, null=True, verbose_name='Nome'),
),
migrations.AddField(
model_name='coordenador',
name='nome',
field=models.CharField(blank=True, max_length=200, null=True, verbose_name='Nome'),
),
migrations.AlterField(
model_name='edital',
name='data_publicacao',
field=models.DateField(verbose_name='Data de publicação'),
),
migrations.DeleteModel(
name='Pagamento',
),
migrations.DeleteModel(
name='Usuario',
),
]
| [
"sarahraquelrs@gmail.com"
] | sarahraquelrs@gmail.com |
a4dfcab1929a4549495921aec3b9178da6d67f3e | 7e0ea1a29084f9536e02f6d7dcf9a0fb80babf58 | /core/views.py | e1eeeec7ef5bcd9a087405bb5c4fbdd5e1cfbecf | [] | no_license | ABYARTH/mywallet | a5b0bdbd0d08d22eb55fbc55e61147b92fcc5805 | 8eb0ce84422b55d0211e391269a7716b4f9c90a7 | refs/heads/master | 2021-01-16T20:00:05.470714 | 2016-01-14T15:17:26 | 2016-01-14T15:17:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,800 | py | import requests
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.contrib.auth import login, authenticate
from django.contrib.auth.models import User, AnonymousUser
from django.template import RequestContext
from django.conf import settings
from django.db.models import Q
from django.template.loader import render_to_string
def main_view(request):
    """Landing page: render the sign-in/sign-up form and handle both submissions.

    POST with form=signin: forwards the credentials to the signin API endpoint
    and, when it accepts them, authenticates against the local auth backend and
    logs the user in before redirecting home.
    POST with form=signup: forwards the registration data to the signup API
    endpoint and renders a confirmation message.
    Any other POST payload gets an 'Invalid data' response; GET renders the form.
    """
    if request.method == 'POST':
        form_data = request.POST.dict()
        if form_data.get('form') == 'signin':
            if 'username' in form_data and 'password' in form_data and 'type' in form_data:
                # Fix: the original authenticated here a first time and
                # discarded the result -- a needless extra backend/DB hit.
                res = requests.post('%s%s' % (settings.DOMAIN, reverse('api:signin')), data=form_data)
                if res.ok:
                    user = authenticate(
                        username=form_data.get('username', ''),
                        password=form_data.get('password', '')
                    )
                    login(request, user)
                    return redirect(reverse('core:home'))
        elif form_data.get('form') == 'signup':
            # form_data was already extracted above; no need to re-read request.POST.
            res = requests.post('%s%s' % (settings.DOMAIN, reverse('api:signup')), data=form_data)
            if res.ok:
                if 'biller' in res.json() or 'user' in res.json():
                    return render(
                        request,
                        'main.html',
                        {'message': 'Ask admin to activate your account you are now registred with MyWallet'}
                    )
                return render(
                    request,
                    'main.html',
                    {'message': 'Kindly login to your account'}
                )
        return HttpResponse('Invalid data')
    return render(request, 'main.html', {})
@login_required(login_url='/mywallet')
def transaction_view(request):
    """Show the logged-in user's transactions together with their wallet profile."""
    user_id = request.user.id
    txns_res = requests.get('%s%s' % (settings.DOMAIN, reverse('api:transactions', args=[user_id])))
    user_res = requests.get('%s%s' % (settings.DOMAIN, reverse('api:user', args=[user_id])))
    context = {
        'txns': txns_res.json(),
        'mywallet_user': user_res.json(),
    }
    return render(request, 'txn.html', context)
@login_required(login_url='/mywallet')
def billers_view(request):
    """List every biller returned by the billers API endpoint."""
    response = requests.get('%s%s' % (settings.DOMAIN, reverse('api:billers')))
    return render(request, 'home.html', {'billers': response.json()})
@login_required(login_url='/mywallet')
def customers_view(request):
    """List every customer returned by the customers API endpoint."""
    response = requests.get('%s%s' % (settings.DOMAIN, reverse('api:customers')))
    return render(request, 'home.html', {'customers': response.json()})
@login_required(login_url='/mywallet')
def users_view(request):
    """List every user returned by the users API endpoint."""
    response = requests.get('%s%s' % (settings.DOMAIN, reverse('api:users')))
    return render(request, 'home.html', {'users': response.json()})
| [
"s.mohanty.006@gmail.com"
] | s.mohanty.006@gmail.com |
dafc3e377763e40bd4c4d5e4406d87111ac9744b | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/interactive-physics-editor/operators/setup_phys_drawing.py | 01edd2b8c2993ca95f30bc14ca621432a93ca02a | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,321 | py | # Copyright (C) 2018 Christopher Gearhart
# chris@bblanimation.com
# http://bblanimation.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# System imports
import bmesh
import math
# Blender imports
import bpy
import bgl
import blf
from bpy_extras.view3d_utils import location_3d_to_region_2d, region_2d_to_location_3d, region_2d_to_origin_3d, region_2d_to_vector_3d
from bpy.types import SpaceView3D
from bpy.props import *
from ..functions import *
class interactive_sim_drawing():
##############################################
# Draw handler function
# from CG Cookie's retopoflow plugin
    def ui_start(self):
        """Install all viewport draw callbacks and dim every non-3D-view editor."""
        # # report something useful to user
        # bpy.context.area.header_text_set("Click & drag to add bricks (+'ALT' to remove). Press 'RETURN' to commit changes")

        # update dpi: 72-dpi baseline scaled by the user's UI scale and pixel size
        prefs = get_preferences(bpy.context)
        ui_scale = prefs.view.ui_scale
        pixel_size = prefs.system.pixel_size
        self.dpi = int(72 * ui_scale * pixel_size)

        # add callback handlers (PRE_VIEW handler draws the background gradient)
        self.cb_pr_handle = SpaceView3D.draw_handler_add(self.draw_callback_preview, (bpy.context, ), 'WINDOW', 'PRE_VIEW')
        # self.cb_pv_handle = SpaceView3D.draw_handler_add(self.draw_callback_postview, (bpy.context, ), 'WINDOW', 'POST_VIEW')
        # self.cb_pp_handle = SpaceView3D.draw_handler_add(self.draw_callback_postpixel, (bpy.context, ), 'WINDOW', 'POST_PIXEL')
        # darken other spaces: every editor type listed here gets a POST_PIXEL
        # "cover" callback installed further down
        self.spaces = [
            bpy.types.SpaceClipEditor,
            bpy.types.SpaceConsole,
            bpy.types.SpaceDopeSheetEditor,
            bpy.types.SpaceFileBrowser,
            bpy.types.SpaceGraphEditor,
            bpy.types.SpaceImageEditor,
            bpy.types.SpaceInfo,
            bpy.types.SpaceLogicEditor,
            bpy.types.SpaceNLA,
            bpy.types.SpaceNodeEditor,
            bpy.types.SpaceOutliner,
            bpy.types.SpaceProperties,
            bpy.types.SpaceSequenceEditor,
            bpy.types.SpaceTextEditor,
            bpy.types.SpaceTimeline,
            #bpy.types.SpaceUVEditor, # <- does not exist?
            bpy.types.SpaceUserPreferences,
            #'SpaceView3D', # <- specially handled
        ]
        self.areas = [ 'WINDOW', 'HEADER' ]
        # ('WINDOW', 'HEADER', 'CHANNELS', 'TEMPORARY', 'UI', 'TOOLS', 'TOOL_PROPS', 'PREVIEW')
        # self.cb_pp_tools = SpaceView3D.draw_handler_add(self.draw_callback_cover, (bpy.context, ), 'TOOLS', 'POST_PIXEL')
        # dim the 3D view's own side panels and header
        self.cb_pp_props  = SpaceView3D.draw_handler_add(self.draw_callback_cover, (bpy.context, ), 'TOOL_PROPS', 'POST_PIXEL')
        self.cb_pp_ui     = SpaceView3D.draw_handler_add(self.draw_callback_cover, (bpy.context, ), 'UI',         'POST_PIXEL')
        self.cb_pp_header = SpaceView3D.draw_handler_add(self.draw_callback_cover, (bpy.context, ), 'HEADER',     'POST_PIXEL')
        # one cover per (space type, area region) pair, remembered for removal in ui_end
        self.cb_pp_all = [
            (s, a, s.draw_handler_add(self.draw_callback_cover, (bpy.context,), a, 'POST_PIXEL'))
            for s in self.spaces
            for a in self.areas
        ]
        self.draw_preview()
        tag_redraw_areas()
    def ui_end(self):
        """Remove every draw callback installed by ui_start and force a redraw.

        Each handle is guarded with hasattr so ui_end is safe to call even if
        ui_start was never run (or only partially completed).
        """
        # remove callback handlers
        if hasattr(self, 'cb_pr_handle'):
            SpaceView3D.draw_handler_remove(self.cb_pr_handle, "WINDOW")
            del self.cb_pr_handle
        if hasattr(self, 'cb_pv_handle'):
            SpaceView3D.draw_handler_remove(self.cb_pv_handle, "WINDOW")
            del self.cb_pv_handle
        if hasattr(self, 'cb_pp_handle'):
            SpaceView3D.draw_handler_remove(self.cb_pp_handle, "WINDOW")
            del self.cb_pp_handle
        if hasattr(self, 'cb_pp_tools'):
            SpaceView3D.draw_handler_remove(self.cb_pp_tools, "TOOLS")
            del self.cb_pp_tools
        if hasattr(self, 'cb_pp_props'):
            SpaceView3D.draw_handler_remove(self.cb_pp_props, "TOOL_PROPS")
            del self.cb_pp_props
        if hasattr(self, 'cb_pp_ui'):
            SpaceView3D.draw_handler_remove(self.cb_pp_ui, "UI")
            del self.cb_pp_ui
        if hasattr(self, 'cb_pp_header'):
            SpaceView3D.draw_handler_remove(self.cb_pp_header, "HEADER")
            del self.cb_pp_header
        if hasattr(self, 'cb_pp_all'):
            # covers installed on the other editor space types
            for s,a,cb in self.cb_pp_all: s.draw_handler_remove(cb, a)
            del self.cb_pp_all
        tag_redraw_areas()
    def draw_callback_preview(self, context):
        """PRE_VIEW handler: draw the background gradient, guarding GL state."""
        bgl.glPushAttrib(bgl.GL_ALL_ATTRIB_BITS)       # save OpenGL attributes
        try: self.draw_callback_preview_actual(context)
        except: interactive_physics_handle_exception()
        bgl.glPopAttrib()                           # restore OpenGL attributes
# def draw_callback_postview(self, context):
# # self.drawing.update_dpi()
# # self.drawing.set_font_size(12, force=True)
# # self.drawing.point_size(1)
# # self.drawing.line_width(1)
# bgl.glPushAttrib(bgl.GL_ALL_ATTRIB_BITS) # save OpenGL attributes
# try: self.draw_postview()
# except: handle_exception()
# bgl.glPopAttrib() # restore OpenGL attributes
    def draw_callback_postpixel(self, context):
        """POST_PIXEL handler: draw the 2D help-text overlay, guarding GL state.

        NOTE(review): currently not registered (its draw_handler_add call in
        ui_start is commented out). Also, sibling callbacks use
        interactive_physics_handle_exception(); confirm that a plain
        handle_exception() is actually in scope here.
        """
        bgl.glPushAttrib(bgl.GL_ALL_ATTRIB_BITS)       # save OpenGL attributes
        try: self.draw_postpixel()
        except: handle_exception()
        bgl.glPopAttrib()                           # restore OpenGL attributes
    def draw_callback_cover(self, context):
        """Dim an editor region by blending a half-transparent black quad over it."""
        bgl.glPushAttrib(bgl.GL_ALL_ATTRIB_BITS)
        bgl.glMatrixMode(bgl.GL_PROJECTION)
        bgl.glPushMatrix()
        # identity projection => the quad below covers the whole region in NDC
        bgl.glLoadIdentity()
        bgl.glColor4f(0,0,0,0.5)    # TODO: use window background color??
        bgl.glEnable(bgl.GL_BLEND)
        bgl.glDisable(bgl.GL_DEPTH_TEST)
        bgl.glBegin(bgl.GL_QUADS)   # TODO: not use immediate mode
        bgl.glVertex2f(-1, -1)
        bgl.glVertex2f( 1, -1)
        bgl.glVertex2f( 1,  1)
        bgl.glVertex2f(-1,  1)
        bgl.glEnd()
        bgl.glPopMatrix()
        bgl.glPopAttrib()
    def draw_preview(self):
        """Draw a radial darkening gradient behind the scene (background vignette)."""
        # smoothing / blending setup for the gradient fan
        bgl.glEnable(bgl.GL_MULTISAMPLE)
        bgl.glEnable(bgl.GL_LINE_SMOOTH)
        bgl.glHint(bgl.GL_LINE_SMOOTH_HINT, bgl.GL_NICEST)
        bgl.glEnable(bgl.GL_BLEND)
        bgl.glEnable(bgl.GL_POINT_SMOOTH)
        bgl.glDisable(bgl.GL_DEPTH_TEST)

        # draw in untransformed normalized device coordinates: push identity
        # onto both matrix stacks (restored at the end)
        bgl.glMatrixMode(bgl.GL_MODELVIEW)
        bgl.glPushMatrix()
        bgl.glLoadIdentity()
        bgl.glMatrixMode(bgl.GL_PROJECTION)
        bgl.glPushMatrix()
        bgl.glLoadIdentity()

        # add background gradient: a triangle fan of 36 slices, transparent at
        # the center and darker toward the edges
        bgl.glBegin(bgl.GL_TRIANGLES)
        for i in range(0,360,10):
            r0,r1 = i*math.pi/180.0, (i+10)*math.pi/180.0
            x0,y0 = math.cos(r0)*2,math.sin(r0)*2
            x1,y1 = math.cos(r1)*2,math.sin(r1)*2
            bgl.glColor4f(0,0,0.01,0.0)
            bgl.glVertex2f(0,0)
            bgl.glColor4f(0,0,0.01,0.8)
            bgl.glVertex2f(x0,y0)
            bgl.glVertex2f(x1,y1)
        bgl.glEnd()

        # restore both matrix stacks
        bgl.glMatrixMode(bgl.GL_PROJECTION)
        bgl.glPopMatrix()
        bgl.glMatrixMode(bgl.GL_MODELVIEW)
        bgl.glPopMatrix()
    def draw_postpixel(self):
        """Draw the 2D help overlay: per-mode instructions (lower-left) and the
        tool-switcher legend (upper-left). The active tool's legend line gets a
        leading '*'. Reads self.mode and self.dpi (via draw_text_2d).
        """
        dtext = "    'D' for Draw/Cut Tool"
        mtext = "    'S' for Merge/Split Tool"
        ptext = "    'M' for Material Paintbrush Tool"
        # draw instructions text for the currently active tool
        if self.mode == "DRAW":
            text = "Click & drag to add bricks"
            self.draw_text_2d(text, position=(50, 250))
            text = "+'ALT' to remove"
            self.draw_text_2d(text, position=(50, 220))
            text = "+'SHIFT' to cut"
            self.draw_text_2d(text, position=(50, 190))
            dtext = "*" + dtext[1:]   # mark this tool as active in the legend
        elif self.mode == "MERGE/SPLIT":
            text = "Click & drag to merge bricks"
            self.draw_text_2d(text, position=(50, 250))
            text = "+'ALT' to split horizontally"
            self.draw_text_2d(text, position=(50, 220))
            text = "+'SHIFT' to split vertically"
            self.draw_text_2d(text, position=(50, 190))
            mtext = "*" + mtext[1:]
        elif self.mode == "PAINT":
            text = "Click & drag to paint bricks with target material"
            self.draw_text_2d(text, position=(50, 190))
            ptext = "*" + ptext[1:]
        text = "'RETURN' to commit changes"
        self.draw_text_2d(text, position=(50, 160))
        # ...api_current/bpy.types.Area.html?highlight=bpy.types.area
        header_height = bpy.context.area.regions[0].height # 26px
        height = bpy.context.area.height + header_height
        # draw tool switcher text anchored below the top of the area
        text = "Switch Tools:"
        self.draw_text_2d(text, position=(40, height - 200))
        self.draw_text_2d(dtext, position=(40, height - 230))
        self.draw_text_2d(mtext, position=(40, height - 260))
        self.draw_text_2d(ptext, position=(40, height - 290))
        # if self.mode == "DRAW":
        #     text = "Click & drag to add bricks (+'ALT' to remove, +'SHIFT' to cut)"
        # elif self.mode == "PAINT":
        #     text = "Click & drag to paint bricks with target material"
        # elif self.mode == "MERGE/SPLIT":
        #     text = "Click & drag to merge bricks (+'ALT' to split horizontally, +'SHIFT' to split vertically)"
        # self.draw_text_2d(text, position=(127, 80))
        # text = "Press 'RETURN' to commit changes"
        # self.draw_text_2d(text, position=(127, 50))
def draw_text_2d(self, text, font_id=0, color=(1, 1, 1, 1), position=(0, 0)):
# draw some text
bgl.glColor4f(*color)
blf.position(font_id, position[0], position[1], 0)
blf.size(font_id, 11, self.dpi)
blf.draw(font_id, text)
bgl.glColor4f(0.0, 0.0, 0.0, 1.0)
# def draw_centerpoint(color, point, width=1):
# bgl.glLineWidth(width)
# bgl.glColor4f(*color)
# bgl.glBegin(bgl.GL_POINTS)
# bgl.glVertex3f(*point)
#
# def Point_to_depth(self, xyz):
# xy = location_3d_to_region_2d(self.region, self.r3d, xyz)
# if xy is None: return None
# oxyz = region_2d_to_origin_3d(self.region, self.r3d, xy)
# return (xyz - oxyz).length
#
# # def Point2D_to_Vec(self, xy:Point2D):
# # if xy is None: return None
# # return Vector(region_2d_to_vector_3d(self.actions.region, self.actions.r3d, xy))
# #
# # def Point2D_to_Origin(self, xy:Point2D):
# # if xy is None: return None
# # return Point(region_2d_to_origin_3d(self.actions.region, self.actions.r3d, xy))
# #
# # def Point2D_to_Ray(self, xy:Point2D):
# # if xy is None: return None
# # return Ray(self.Point2D_to_Origin(xy), self.Point2D_to_Vec(xy))
# #
# # def Point2D_to_Point(self, xy:Point2D, depth:float):
# # r = self.Point2D_to_Ray(xy)
# # if r is None or r.o is None or r.d is None or depth is None:
# # return None
# # return Point(r.o + depth * r.d)
# #
# # def size2D_to_size(self, size2D:float, xy:Point2D, depth:float):
# # # computes size of 3D object at distance (depth) as it projects to 2D size
# # # TODO: there are more efficient methods of computing this!
# # p3d0 = self.Point2D_to_Point(xy, depth)
# # p3d1 = self.Point2D_to_Point(xy + Vector((size2D,0)), depth)
# # return (p3d0 - p3d1).length
#
# def update_ui_mouse_pos(self):
# if self.loc is None or self.normal is None:
# self.clear_ui_mouse_pos()
# return
# depth = self.Point_to_depth(self.loc)
# if depth is None:
# self.clear_ui_mouse_pos()
# return
# rmat = Matrix.Rotation(self.oz.angle(self.normal), 4, self.oz.cross(self.normal))
# self.hit = True
# self.scale = 1 # self.rfcontext.size2D_to_size(1.0, self.mouse, depth)
# self.hit_p = self.loc
# self.hit_x = Vector(rmat * self.ox)
# self.hit_y = Vector(rmat * self.oy)
# self.hit_z = Vector(rmat * self.oz)
# self.hit_rmat = rmat
#
# def clear_ui_mouse_pos(self):
# ''' called when mouse is moved outside View3D '''
# self.hit = False
# self.hit_p = None
# self.hit_x = None
# self.hit_y = None
# self.hit_z = None
# self.hit_rmat = None
#
# @staticmethod
# @blender_version('<','2.79')
# def update_dpi():
# paintbrush._dpi = get_preferences(bpy.context).system.dpi
# if get_preferences(bpy.context).system.virtual_pixel_mode == 'DOUBLE':
# paintbrush._dpi *= 2
# paintbrush._dpi *= get_preferences(bpy.context).system.pixel_size
# paintbrush._dpi = int(paintbrush._dpi)
# paintbrush._dpi_mult = paintbrush._dpi / 72
#
# @staticmethod
# @blender_version('>=','2.79')
# def update_dpi():
# paintbrush._ui_scale = get_preferences(bpy.context).view.ui_scale
# paintbrush._sysdpi = get_preferences(bpy.context).system.dpi
# paintbrush._pixel_size = get_preferences(bpy.context).system.pixel_size
# paintbrush._dpi = 72 # get_preferences(bpy.context).system.dpi
# paintbrush._dpi *= paintbrush._ui_scale
# paintbrush._dpi *= paintbrush._pixel_size
# paintbrush._dpi = int(paintbrush._dpi)
# paintbrush._dpi_mult = paintbrush._ui_scale * paintbrush._pixel_size * paintbrush._sysdpi / 72
# s = 'DPI information: scale:%0.2f, pixel:%0.2f, dpi:%d' % (paintbrush._ui_scale, paintbrush._pixel_size, paintbrush._sysdpi)
# if s != getattr(paintbrush, '_last_dpi_info', None):
# paintbrush._last_dpi_info = s
# print(s)
#
# def draw_postview(self):
# print("HERE")
# if not self.hit: return
# print("HERE2")
#
# cx,cy,cp = self.hit_x,self.hit_y,self.hit_p
# cs_outer = self.scale * self.radius
# cs_inner = self.scale * self.radius * math.pow(0.5, 1.0 / self.falloff)
# cr,cg,cb = self.color
#
# bgl.glDepthRange(0, 0.999) # squeeze depth just a bit
# bgl.glEnable(bgl.GL_BLEND)
# # self.drawing.line_width(2.0)
# # self.drawing.point_size(3.0)
# bgl.glPointSize(max(1, 3.0 * self._dpi_mult))
#
# ######################################
# # draw in front of geometry
#
# bgl.glDepthFunc(bgl.GL_LEQUAL)
# bgl.glDepthMask(bgl.GL_FALSE) # do not overwrite depth
#
# bgl.glColor4f(cr, cg, cb, 0.75 * self.strength)
# bgl.glBegin(bgl.GL_TRIANGLES)
# for p0,p1 in zip(self.points[:-1], self.points[1:]):
# x0,y0 = p0
# x1,y1 = p1
# outer0 = (cs_outer * ((cx * x0) + (cy * y0))) + cp
# outer1 = (cs_outer * ((cx * x1) + (cy * y1))) + cp
# inner0 = (cs_inner * ((cx * x0) + (cy * y0))) + cp
# inner1 = (cs_inner * ((cx * x1) + (cy * y1))) + cp
# bgl.glVertex3f(*outer0)
# bgl.glVertex3f(*outer1)
# bgl.glVertex3f(*inner0)
# bgl.glVertex3f(*outer1)
# bgl.glVertex3f(*inner1)
# bgl.glVertex3f(*inner0)
# bgl.glEnd()
#
# bgl.glColor4f(1, 1, 1, 1) # outer ring
# bgl.glBegin(bgl.GL_LINE_STRIP)
# for x,y in self.points:
# p = (cs_outer * ((cx * x) + (cy * y))) + cp
# bgl.glVertex3f(*p)
# bgl.glEnd()
#
# # bgl.glColor4f(1, 1, 1, 0.5) # inner ring
# # bgl.glBegin(bgl.GL_LINE_STRIP)
# # for x,y in self.points:
# # p = (cs_inner * ((cx * x) + (cy * y))) + cp
# # bgl.glVertex3f(*p)
# # bgl.glEnd()
#
# bgl.glColor4f(1, 1, 1, 0.25) # center point
# bgl.glBegin(bgl.GL_POINTS)
# bgl.glVertex3f(*cp)
# bgl.glEnd()
#
# # ######################################
# # # draw behind geometry (hidden below)
# #
# # bgl.glDepthFunc(bgl.GL_GREATER)
# # bgl.glDepthMask(bgl.GL_FALSE) # do not overwrite depth
# #
# # bgl.glColor4f(cr, cg, cb, 0.10 * self.strength)
# # bgl.glBegin(bgl.GL_TRIANGLES)
# # for p0,p1 in zip(self.points[:-1], self.points[1:]):
# # x0,y0 = p0
# # x1,y1 = p1
# # outer0 = (cs_outer * ((cx * x0) + (cy * y0))) + cp
# # outer1 = (cs_outer * ((cx * x1) + (cy * y1))) + cp
# # inner0 = (cs_inner * ((cx * x0) + (cy * y0))) + cp
# # inner1 = (cs_inner * ((cx * x1) + (cy * y1))) + cp
# # bgl.glVertex3f(*outer0)
# # bgl.glVertex3f(*outer1)
# # bgl.glVertex3f(*inner0)
# # bgl.glVertex3f(*outer1)
# # bgl.glVertex3f(*inner1)
# # bgl.glVertex3f(*inner0)
# # bgl.glEnd()
# #
# # bgl.glColor4f(1, 1, 1, 0.05) # outer ring
# # bgl.glBegin(bgl.GL_LINE_STRIP)
# # for x,y in self.points:
# # p = (cs_outer * ((cx * x) + (cy * y))) + cp
# # bgl.glVertex3f(*p)
# # bgl.glEnd()
# #
# # bgl.glColor4f(1, 1, 1, 0.025) # inner ring
# # bgl.glBegin(bgl.GL_LINE_STRIP)
# # for x,y in self.points:
# # p = (cs_inner * ((cx * x) + (cy * y))) + cp
# # bgl.glVertex3f(*p)
# # bgl.glEnd()
#
# ######################################
# # reset to defaults
#
# bgl.glDepthFunc(bgl.GL_LEQUAL)
# bgl.glDepthMask(bgl.GL_TRUE)
#
# bgl.glDepthRange(0, 1)
#
# return
#############################################
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
d691c4b54e635a2d515ac8e2806e29ed04c757c3 | c2285444392e8eb1904255b82f62b49f317aca07 | /tagger/db/models/tag.py | 95a487c8861a4b3aa4e29120ff433e8162fdbc16 | [] | no_license | tlskr/tagger | ced0ed36437bb29fe488eb2fae8b03314c5a9558 | 1230a1f36b91bd7ef2d57840dcfa013ca07e5a4a | refs/heads/master | 2022-12-16T00:44:36.798298 | 2018-08-17T13:23:02 | 2018-08-17T13:26:37 | 145,027,493 | 0 | 0 | null | 2022-12-08T02:46:37 | 2018-08-16T18:55:02 | Python | UTF-8 | Python | false | false | 2,353 | py | # coding: utf-8
from sqlalchemy import Column, Text
from sqlalchemy.dialects.postgresql.json import JSONB
from . base import Base
from tagger.db.session import get_session
class Tag(Base):
    """SQLAlchemy model for a vendor-supplied tag with free-form JSONB metadata."""
    __tablename__ = 'tag'
    # Primary key: the vendor's own tag identifier.
    tag_id = Column(Text, primary_key=True)
    vendor_id = Column(Text, nullable=False)
    # Arbitrary key/value metadata, stored as Postgres JSONB.
    tag_metadata = Column(JSONB(astext_type=Text()), nullable=False)
    @classmethod
    def get_session(cls):
        """Open and return a new database session (one per call)."""
        Session = get_session() # TODO: clean up, ugly
        return Session()
    @classmethod
    def list_vendors(cls):
        """Return the distinct vendor_id values (as a list of 1-tuple rows)."""
        session = cls.get_session()
        x = session.query(cls.vendor_id).distinct()
        return list(x.all())
    @classmethod
    def list_tags(cls):
        """Return the distinct tag_id values (as a list of 1-tuple rows)."""
        session = cls.get_session()
        x = session.query(Tag.tag_id).distinct()
        return list(x.all())
    @classmethod
    def get_tag_instance(cls, tag_id):
        ''' Return the Tag instance with the given tag_id.

        NOTE: raises if no matching row exists (result is indexed directly).
        '''
        session = cls.get_session()
        data = session.query(Tag).filter(Tag.tag_id == tag_id)
        return data[0]
    @classmethod
    def get_tag_data(cls, tag_id):
        ''' Return ALL column values for one tag as a dict (not only tag_metadata). '''
        tag = cls.get_tag_instance(tag_id)
        return tag.all_data
    @property
    def all_data(self):
        ''' Return every mapped column of this instance as a {name: value} dict. '''
        # TODO: there must be a better way
        retval = {}
        for col in self.__table__.columns:
            retval[col.name] = getattr(self, col.name)
        return retval
    @classmethod
    def load_tags(cls, data):
        """Bulk-insert tags from a payload shaped like:
        {'vendor_id': ..., 'tags': [{'tag_id': ...,
                                     'metadata': [{'key': ..., 'value': ...}, ...]}]}
        Metadata items without a 'key' are skipped; a single commit at the end.
        """
        session = cls.get_session()
        vendor_id = data['vendor_id']
        for tag in data['tags']:
            new_tag = {}
            for item in tag.get('metadata', []):
                if 'key' not in item:
                    continue
                new_tag[item['key']] = item.get('value')
            this_tag = Tag(
                tag_id=tag['tag_id'],
                vendor_id=vendor_id,
                tag_metadata=new_tag,
            )
            session.add(this_tag) # faster to use add_all()?
        session.commit()
    @classmethod
    def query_tag_metadata(cls, qry):
        """Return all_data dicts of tags whose metadata contains {qry['key']: qry['value']}."""
        session = cls.get_session()
        qrydct = {qry['key']: qry['value']}
        data = session.query(Tag).filter(Tag.tag_metadata.contains(qrydct))
        return [i.all_data for i in list(data.all())]
| [
"gordon@practicalhorseshoeing.com"
] | gordon@practicalhorseshoeing.com |
a948cb00874c590197218356b4be33325d720557 | 317676b47c60e3a86ef58c93fab14829156a2f27 | /18.1/18.1/_18.1.py | dfd81a7d3b92e9485e3ed4361784def2d932865f | [] | no_license | bellontea/python_lab3 | aa650a6e42997f0b03837bb3850c0590e92a91bc | 756a2a4929a1c3a6a1e26c7dec21f52cf1b44b87 | refs/heads/master | 2023-04-26T20:10:40.406188 | 2021-05-15T20:15:01 | 2021-05-15T20:15:01 | 367,721,242 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | def print_shrug_smile():
print("¯\_(ツ)_/¯")
def print_ktulhu_smile():
    """Print the 'Cthulhu' emoticon to stdout."""
    face = "{:€"
    print(face)
def print_happy_smile():
    """Print the happy ('lenny'-style) emoticon to stdout."""
    face = "(͡°ʖ ͡°)"
    print(face)
| [
"71792005+bellontea@users.noreply.github.com"
] | 71792005+bellontea@users.noreply.github.com |
da9cb52b8cf4baa8f36751f5f276ece4eef2e963 | cfaa6c0c92e3325fa49b6d6fc0079223bdd67f08 | /pre_processing.py | a8b897f81b2725b50ad4618874e535b9c3304ae5 | [] | no_license | ieee820/metal_crack_detector | e08187d503b4d6e71b1e0625a41491b05e477b7d | 4d1fc779f688046eba10ea6babc8e5a8df1bfde6 | refs/heads/master | 2020-06-30T21:31:48.394439 | 2019-07-23T05:16:02 | 2019-07-23T05:16:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,969 | py | import os
import numpy as np
import cv2
import matplotlib.pyplot as plt
from parameters import RESIZE_DIMENSION
# Input image directories, relative to the working directory.
# NOTE(review): hard-coded dataset layout -- confirm these paths exist before running.
HEALTHY_FENDER_APRON_PATH = './data/YE358311_Fender_apron_2/YE358311_Fender_apron/YE358311_Healthy'
DEFECTIVE_FENDER_APRON_PATH ='./data/YE358311_Fender_apron_2/YE358311_Fender_apron/YE358311_defects' \
                             '/YE358311_Crack_and_Wrinkle_defect'
def get_all_files():
    """Return (healthy, defective) lists of image file names from the two
    dataset directories."""
    return (
        os.listdir(HEALTHY_FENDER_APRON_PATH),
        os.listdir(DEFECTIVE_FENDER_APRON_PATH),
    )
def pre_processing(data_path):
    """Load one image, resize it to RESIZE_DIMENSION, convert to grayscale,
    and return its Sobel x/y gradient maps, each with a trailing channel
    axis appended (shape (H, W, 1))."""
    # load -> resize -> grayscale in one pipeline
    gray = cv2.cvtColor(
        cv2.resize(cv2.imread(data_path), RESIZE_DIMENSION, interpolation=cv2.INTER_AREA),
        cv2.COLOR_BGR2GRAY,
    )
    # first-order derivatives along x and y
    grad_x = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=5, scale=2)
    grad_y = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=5, scale=2)
    return grad_x[..., np.newaxis], grad_y[..., np.newaxis]
def get_preprocessed_data():
    """Assemble the training set from the healthy and defective image folders.

    Each image contributes two samples -- its Sobel-x and Sobel-y gradient
    maps from pre_processing() -- stacked along axis 0 in folder order:
    healthy images first (label 1), then defective (label 0).

    Returns:
        dict with 'data' (ndarray of shape (2 * n_images, H, W, 1)) and
        'target' (ndarray of labels, two entries per image).
    """
    healthy_images, defected_images = get_all_files()
    # Collect per-image arrays and concatenate once at the end.  The previous
    # version re-concatenated the growing array per image (O(n^2)) and crashed
    # with a shape mismatch when the healthy folder was empty.
    arrays = []
    labels = []
    for base_dir, label, names in (
        (HEALTHY_FENDER_APRON_PATH, 1, healthy_images),
        (DEFECTIVE_FENDER_APRON_PATH, 0, defected_images),
    ):
        for image in names:
            grad_x, grad_y = pre_processing(data_path=os.path.join(base_dir, image))
            arrays.append(grad_x[np.newaxis, ...])
            arrays.append(grad_y[np.newaxis, ...])
            labels.extend((label, label))
    data = np.concatenate(arrays, axis=0) if arrays else np.array([])
    target = np.array(labels)
    return {'data': data, 'target': target}
| [
"john.janmejaya@gmail.com"
] | john.janmejaya@gmail.com |
82a0c09f77fd08d7efdf8332cfffa06c22aff9ad | b55e0a122670a6a29b0b5e1581865f784ef200b9 | /chats/migrations/0005_alter_message_user.py | b4f25fa34cbf96c09ffe16b6c103ecfea68972d4 | [] | no_license | kattyeye/django-rest-chat-app | fc6b0a23535adb28efeff1d9f1af18ad982ce7bf | 052eadadcccd1e7f77d10f37424156e32b0210de | refs/heads/main | 2023-08-20T19:55:20.388391 | 2021-10-30T14:23:53 | 2021-10-30T14:23:53 | 416,809,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 603 | py | # Generated by Django 3.2.8 on 2021-10-16 13:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration -- avoid hand-editing the operations.

    Alters ``Message.user`` to be nullable while keeping CASCADE deletion
    and the ``users`` reverse accessor.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('chats', '0004_message_user'),
    ]
    operations = [
        migrations.AlterField(
            model_name='message',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='users', to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"69052750+kattyeye@users.noreply.github.com"
] | 69052750+kattyeye@users.noreply.github.com |
2fab7f0d353b7e9fa2819146c61ec7b6bc83105a | 0cd096c51dda831ee64c405c7a994d18b0fe783f | /components/joystick.py | 792ba5075cc46b5ada499bf3459d4285c93c6d5d | [] | no_license | Denrur/ecs_turn_based | db1797ed787f644f334e7c08450139febf5ae7ae | 79ff9ea23fe74db7936d81afce8ddca4f0541e6f | refs/heads/master | 2020-07-12T22:47:09.800235 | 2019-11-06T12:47:24 | 2019-11-06T12:47:24 | 204,926,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,084 | py | from bearlibterminal import terminal as blt
class Joystick:
    """Translates BearLibTerminal key codes into game action dicts.

    ``handle_player_turn_keys`` returns a fresh action dict per key press
    (an empty dict for unbound keys) and caches the last stateful action
    in ``self.action``.
    """
    def __init__(self):
        # Last movement/inventory/pass action handled; None until a key arrives.
        self.action = None

    def handle_player_turn_keys(self, key):
        """Map *key* to an action dict such as {'move': (dx, dy)} or
        {'pickup': True}; returns {} when the key is not bound."""
        # Movement bindings: arrow keys plus the Q/W/E, A/D, Z/X/C keypad layout.
        move_keys = {
            blt.TK_UP: (0, -1), blt.TK_W: (0, -1),
            blt.TK_DOWN: (0, 1), blt.TK_X: (0, 1),
            blt.TK_LEFT: (-1, 0), blt.TK_A: (-1, 0),
            blt.TK_RIGHT: (1, 0), blt.TK_D: (1, 0),
            blt.TK_Q: (-1, -1),  # up-left
            blt.TK_E: (1, -1),   # up-right
            blt.TK_Z: (-1, 1),   # down-left
            # BUGFIX: TK_C previously cached {'move': (-1, 1)} in self.action
            # while returning {'move': (1, 1)}; down-right is (1, 1).
            blt.TK_C: (1, 1),
        }
        if key in move_keys:
            delta = move_keys[key]
            self.action = {'move': delta}
            return {'move': delta}
        if key == blt.TK_G:
            self.action = {'pickup': True}
            return {'pickup': True}
        if key == blt.TK_MOUSE_SCROLL:
            return {'scroll': True}
        if key == blt.TK_I:
            self.action = {'show_inventory': True}
            return {'show_inventory': True}
        if key == blt.TK_O:
            self.action = {'drop_inventory': True}
            return {'drop_inventory': True}
        if key == blt.TK_S:
            self.action = {'pass': True}
            return {'pass': True}
        # NOTE(review): ``blt.TK_ALT`` is a non-zero constant, so this branch
        # fires for plain RETURN as well; checking the real ALT state would
        # need blt.check(blt.TK_ALT) -- confirm intent before changing.
        if key == blt.TK_RETURN and blt.TK_ALT:
            return {'fullscreen': True}
        if key == blt.TK_ESCAPE:
            return {'exit': True}
        if key == 133:  # magic constant kept from the original; presumably a mouse event code
            return {'mouse': True}
        return {}
"denrurak@gmail.com"
] | denrurak@gmail.com |
021841d84dc923f063d7a2f09b346af1e7452d2a | 1d4f4c45da7465f2d39e567de31d79b578002bca | /old/collect_script/depth_crawl.py | 3d9a78ec3a152b89ed30696150fcdb5f702f741b | [] | no_license | lihn1987/CoinCollector | 9f1e7ab45273fea04b809a363b5af2f4fd6f1733 | 3115be42a6bf72e969bbdc245f5bf217b33b25d9 | refs/heads/master | 2022-07-16T07:39:27.269642 | 2021-03-31T16:45:03 | 2021-03-31T16:45:03 | 196,840,232 | 24 | 2 | null | 2022-06-22T04:12:20 | 2019-07-14T13:32:02 | Python | UTF-8 | Python | false | false | 171 | py | import time
import depth_huobi
import depth_ok
import depth_binance
# Kick off one order-book (depth) crawler per exchange.
# NOTE(review): presumably each StartCrwal() returns after spawning background
# work -- confirm in the depth_* modules ("Crwal" is their own spelling).
depth_huobi.StartCrwal()
depth_ok.StartCrwal()
depth_binance.StartCrwal()
while True:
time.sleep(1) | [
"lihn1011@163.com"
] | lihn1011@163.com |
9687e2ef03ae47ee8da4844d73ec64b309f7fa7d | 3e95e4399ac1386b6ea21d1ed0841a0da9ed4ed8 | /Code/show_images.py | 16141eca01fd591d7594b76ba86a1acd71b2743b | [] | no_license | sbaio/Restricted-Boltzmann-Machine | 9ddbdef618600c68224ab9f6ffb3d7bced02c00b | e1c304aac444c3a30e29645aa5f9f6e76149f1f4 | refs/heads/master | 2020-01-23T21:41:12.593660 | 2017-01-19T13:42:24 | 2017-01-19T13:42:24 | 74,689,495 | 3 | 0 | null | 2016-12-08T11:13:51 | 2016-11-24T16:29:58 | Python | UTF-8 | Python | false | false | 2,206 | py |
from loadMNIST import load_mnist
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
def showImage(image):
    """Display one image in greyscale with nearest-neighbour interpolation,
    ticks on the top and left edges."""
    axes = plt.figure().add_subplot(1, 1, 1)
    rendered = axes.imshow(image, cmap=mpl.cm.Greys)
    rendered.set_interpolation('nearest')
    axes.xaxis.set_ticks_position('top')
    axes.yaxis.set_ticks_position('left')
    plt.show()
def show_10_Images(image):
    """Display the SAME image in every panel of a 2x5 subplot grid.

    NOTE(review): despite the name, this takes a single image and draws it
    ten times -- presumably it was meant to take a list of ten images;
    confirm with callers before changing the signature.
    """
    fig = plt.figure()
    for i in range(10):
        ax = fig.add_subplot(2,5,i+1)
        imgplot = ax.imshow(image,cmap=mpl.cm.Greys)
        imgplot.set_interpolation('nearest')
        ax.xaxis.set_ticks_position('top')
        ax.yaxis.set_ticks_position('left')
    plt.show()
def showImages(images):
    """Display a small list of images side by side in a single figure row."""
    fig = plt.figure()
    count = len(images)
    for idx, img in enumerate(images):
        axes = fig.add_subplot(1, count, idx + 1)
        rendered = axes.imshow(img, cmap=mpl.cm.Greys)
        rendered.set_interpolation('nearest')
        axes.xaxis.set_ticks_position('top')
        axes.yaxis.set_ticks_position('left')
    plt.show()
def plot_10_by_10_images(images):
    """ Plot 100 MNIST images in a 10 by 10 table. Note that we crop
    the images so that they appear reasonably close together. The
    image is post-processed to give the appearance of being continued.

    NOTE(review): Python 2 code (print statements, raw_input).  The subplot
    index ``10*y + x%10 + 1`` only ever fills one column per outer step, so
    the 10x10 grid is built up over successive iterations -- the indexing
    looks suspect; verify against a real MNIST run before relying on it."""
    n = images.shape[0]
    q = n // 10
    r = n%10
    print n,q,r
    fig = plt.figure()
    plt.ion()
    for x in range(q):
        print x
        if not x%10:
            plt.clf()
        for y in range(10):
            ax = fig.add_subplot(10, 10, 10*y+x%10+1)
            ax.matshow(images[10*y+x%10], cmap = mpl.cm.binary)
            plt.xticks(np.array([]))
            plt.yticks(np.array([]))
        plt.show()
        _=raw_input("Press enter to show next 10")
def generate_random_image():
    """Return a random 28x28 uint8 image with values in [0, 255]."""
    flat = np.random.randint(256, size=28 * 28, dtype='uint8')
    return flat.reshape((28, 28))
def image_to_vector(im):
    """Flatten a 2-D image into a 1-D float vector scaled into [0, 1]."""
    return np.squeeze(im.reshape((-1, 1))) / 255.
def vec_to_image(vec):
    """Reshape a flat length-784 vector back into a 28x28 image."""
    return np.reshape(vec, (28, 28))
# Demo / smoke test: load the MNIST training images and display the first one.
images, labels = load_mnist('training', digits=np.arange(10), path = '../Data/')
a = generate_random_image()
#a = images[0]
#b = np.squeeze(a.reshape((-1,1)))/255.
#print b.shape
#print b[:]
showImage(images[0])
#c = vec_to_image(b)
#showImage(c)
#showImages([a,c])
#showImage(d)
#print c.shape
| [
"otossbai@gmail.com"
] | otossbai@gmail.com |
bfb9ba31588b661323abab54f4fd5873f537d7ca | 42e063977c906d351f463d68bbcfbab6baf32488 | /Train1.py | 79b5edc185a7b3d4209cf0fde6c0c0dcb9e7595b | [] | no_license | Yk1n0gEa/Linear-Regression | 01edac563bfcb7f7eb09ff2f19dcd8a1ed6ee0ac | 0a3c07e84bc402bac032500d0b524f73109789f8 | refs/heads/master | 2020-03-19T01:49:44.275480 | 2018-05-29T09:10:49 | 2018-05-29T09:10:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,404 | py |
# Linear-regression training script (TensorFlow 1.x graph API).
# Reads feature/label CSVs, min-max normalizes the features, trains
# y = x.w + b with a mini-batch MSE loss, and checkpoints the model.
import tensorflow as tf
import numpy as np
import pandas as pd
import random
tf.reset_default_graph()
# Which dataset to train on: per-user or per-time-slot.
train_user=0
train_time=1
model_path=""
X_csv_train_file_path=""
y_csv_train_file_path=""
Y_prediction_file_path=""
root_directory = "D:\\jd\\model"
#train_object=train_user
train_object=train_time
if train_object == train_user :
    model_path = root_directory+"\\User\\train.ckpt"
    X_csv_train_file_path = "../data/X_train_user.csv"
    y_csv_train_file_path = "../data/y_train_user.csv"
    Y_prediction_file_path = "../data/y_pred_user.csv"
if train_object == train_time :
    model_path = root_directory+"\\Time\\train.ckpt"
    X_csv_train_file_path = "../data/X_train_time.csv"
    y_csv_train_file_path = "../data/y_train_time.csv"
    Y_prediction_file_path = "../data/y_pred_time.csv"
feature_size=57
dataset_size=0
X1=pd.read_csv(X_csv_train_file_path,header=0)
X1.fillna(0,inplace=True)
X=np.array(X1)
[dataset_size,feature_size]=X.shape
print(X.shape)
# Min-max normalize each feature column.
# NOTE(review): the loop stops at feature_size - 1, so the LAST column is
# left un-normalized -- confirm whether that is intentional.
maximums, minimums, avgs = X.max(axis=0), X.min(axis=0), X.sum(
    axis=0) / X.shape[0]
for i in range(feature_size - 1):
    X[:, i] = (X[:, i] - avgs[i]) / (maximums[i] - minimums[i])
Y1 = pd.read_csv(y_csv_train_file_path,header=0)
Y1.fillna(0,inplace=True)
Y=np.array(Y1)
Y=Y.reshape(-1,1)
print(Y.shape)
# Size of each training mini-batch.
batch_size=100
# Model parameters: weight vector w and scalar bias b.
w=tf.Variable(tf.random_normal([feature_size,1],stddev=1,seed=1))
b = tf.Variable(0.0, name="biases",dtype=tf.float32)
x=tf.placeholder(tf.float32,shape=(None,feature_size),name='x-input')
y_=tf.placeholder(tf.float32,shape=(None,1),name='y-input')
# Forward pass: predicted y = x.w + b.
y=tf.add(tf.matmul( x,w) ,b)
# Loss and optimizer (despite the name, "cross_entropy" here is mean squared error).
#cross_entropy=-tf.reduce_mean(y_*tf.log(tf.clip_by_value(y,1e-10,1.0)))
#train_step=tf.train.AdamOptimizer(0.0001).minimize(cross_entropy)
cross_entropy = tf.reduce_mean(tf.square(y - y_))
#optimizer = tf.train.GradientDescentOptimizer(0.001)
optimizer = tf.train.AdamOptimizer(0.01)# try different optimizers to compare convergence
train_step = optimizer.minimize(cross_entropy)
with tf.Session() as sess:
    saver = tf.train.Saver()
    init_op=tf.global_variables_initializer()
    sess.run(init_op)
    print('训练前网络参数的值为:')
    print(sess.run(w))
    print(sess.run(b))
    # Number of training steps; batches are drawn at random offsets.
    STEPS=200000
    for i in range(STEPS):
        # Pick a random contiguous batch of batch_size samples.
        #start=(i*batch_size)%dataset_size
        #end=min(start+batch_size,dataset_size)
        n=random.randint(0, dataset_size-batch_size)
        start=n
        end=n+batch_size
        # Run one optimizer step on the selected batch.
        sess.run(train_step,feed_dict={x:X[start:end],y_:Y[start:end]})
        if i % 100==0:
            # Periodically evaluate the loss on the whole dataset and log it.
            total_cross_entropy=sess.run(cross_entropy,feed_dict={x:X,y_:Y})
            print("start:{}: After{} training step(s),cross entropy on all data is {}".
                  format(start,i,total_cross_entropy))
    print('训练后网络参数的值为:')
    print(sess.run(w))
    print(sess.run(b))
    save_path = saver.save(sess, model_path)
print("complete") | [
"noreply@github.com"
] | Yk1n0gEa.noreply@github.com |
15455b46dcf914a8067898dd19c0856955501824 | 9398a9d53c2977df17465cceb500d2568dad311b | /Build a computer/nand2tetris/nand2tetris/projects/11/JackAnalyzer.py | 700bb22f2eaf92ff7d0a779159bdfb7720fca87a | [] | no_license | jianqiangq/coursera | 561650a59ec5768c3687b34788873125ccc8cb1e | 4530d986f33cd2281e43fb6c26fa1af8c302c2c3 | refs/heads/master | 2023-02-27T16:30:30.974446 | 2021-01-28T03:25:23 | 2021-01-28T03:25:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,632 | py | from JackTokenizer import JackTokenizer,TOKEN_TYPE,KEYWORD_TYPE,tokentype,tokendict
import fileinput
import sys, getopt
import os
from enum import Enum, unique
import sys
class JackAnalyzer:
def __init__(self,infile):
# read all source code string to the buffer
self.parser = JackTokenizer(infile)
outfile = infile[:infile.find(".jack")] + ".xml"
self.out = open(outfile,"w")
self.depth = 0
self.compileClass()
self.out.close()
def lowerLevel(self):
self.depth += 1
def upperLevel(self):
self.depth -= 1
def compileOut(self,str):
self.out.write(" "*self.depth)
self.out.write(str)
def compileOutElement(self,tkType,tkStr):
self.out.write(" "*self.depth)
typeStr = ""
if tkType == TOKEN_TYPE.TOKEN_KEYWORD:
typeStr = "keyword"
elif tkType == TOKEN_TYPE.TOKEN_SYMBOL:
typeStr = "symbol"
elif tkType == TOKEN_TYPE.TOKEN_IDENTIFIER:
typeStr = "identifier"
elif tkType == TOKEN_TYPE.TOKEN_INT_CONST:
typeStr = "integerConstant"
elif tkType == TOKEN_TYPE.TOKEN_STRING_CONST:
typeStr = "stringConstant"
elif tkType == TOKEN_TYPE.TOKEN_INVALID:
typeStr = "invalid"
self.out.write("<" + typeStr + "> " + tkStr + " </" + typeStr + ">\n")
    def compileClass(self):
        """Compile ``class className { classVarDec* subroutineDec* }``.

        Emits the <class> XML node; on any grammar mismatch prints the
        current source line number and terminates via exit(1).
        NOTE(review): error messages read "valid class define!" but mean
        "invalid" -- kept byte-identical here.
        """
        self.compileOut("<class>\n")
        self.lowerLevel()
        # parse the 'class' keyword
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            self.parser.keyWord() == "class":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid class define!\n")
            exit(1)
        # parse class name
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid class define!\n")
            exit(1)
        # parse symbol '{'
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
            self.parser.symbol() == "{":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid class define!\n")
            exit(1)
        # parse class variable declarations (static | field)
        while self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            (self.parser.keyWord() == "static" or self.parser.keyWord() == "field"):
            self.compileClassVarDec()
        # parse subroutine declarations (method | constructor | function)
        while self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            (self.parser.keyWord() == "method" or \
            self.parser.keyWord() == "constructor" or \
            self.parser.keyWord() == "function"):
            self.compileSubroutine()
        # parse closing symbol '}'
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
            self.parser.symbol() == "}":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid class define!\n")
            exit(1)
        # end of class node
        self.upperLevel()
        self.compileOut("</class>\n")
        return True
    def compileClassVarDec(self):
        """Compile ``(static | field) type varName (',' varName)* ';'``.

        Emits the <classVarDec> XML node; grammar mismatches print the line
        number and terminate via exit(1).
        """
        self.compileOut("<classVarDec>\n")
        self.lowerLevel()
        # parse 'static' or 'field' keyword
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            (self.parser.keyWord() == "static" or self.parser.keyWord() == "field"):
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid val define!\n")
            exit(1)
        # parse variable type (built-in keyword or class identifier)
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD or \
            self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid val define!\n")
            exit(1)
        # parse first variable name
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid val define!\n")
            exit(1)
        # parse the remaining ',' varName pairs until ';'
        while not (self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
            self.parser.symbol() == ";"):
            # parse symbol ','
            if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ",":
                self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
                self.parser.advance()
            else:
                print(str(sys._getframe().f_lineno) + "valid val define!\n")
                exit(1)
            # parse variable name
            if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
                self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
                self.parser.advance()
            else:
                print(str(sys._getframe().f_lineno) + "valid val define!\n")
                exit(1)
        # parse the terminating ';'
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ";":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "valid val define!\n")
            exit(1)
        # end of class variable declaration
        self.upperLevel()
        self.compileOut("</classVarDec>\n")
        return True
    def compileSubroutine(self):
        """Compile ``(constructor|function|method) (type|void) name '(' parameterList ')' body``.

        Emits the <subroutineDec> XML node; grammar mismatches print the
        line number and terminate via exit(1).
        """
        self.compileOut("<subroutineDec>\n")
        self.lowerLevel()
        # parse subroutine kind keyword
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            (self.parser.keyWord() == "constructor" or \
            self.parser.keyWord() == "function" or \
            self.parser.keyWord() == "method"):
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "invalid subroutine!\n")
            exit(1)
        # parse return type: built-in keyword, 'void', or class identifier
        if (self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            (self.parser.keyWord() == "int" or \
            self.parser.keyWord() == "char" or \
            self.parser.keyWord() == "void" or \
            self.parser.keyWord() == "boolean")) or \
            self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
            if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD:
                self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
                self.parser.advance()
            elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
                self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
                self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "invalid subroutine!\n")
            exit(1)
        # parse subroutineName
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "invalid subroutine!\n")
            exit(1)
        # parse '('
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == "(":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "invalid subroutine!\n")
            exit(1)
        # parse parameter list (possibly empty)
        self.compileParameterList()
        # parse ')'
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ")":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print(str(sys._getframe().f_lineno) + "invalid subroutine!\n")
            exit(1)
        # parse subroutine body '{ varDec* statements }'
        self.compileSubroutineBody()
        self.upperLevel()
        self.compileOut("</subroutineDec>\n")
        return True
    def compileSubroutineBody(self):
        """Compile ``'{' varDec* statements '}'``.

        Emits the <subroutineBody> XML node; mismatches print a message and
        terminate via exit(1).
        """
        self.compileOut("<subroutineBody>\n")
        self.lowerLevel()
        # parse '{'
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
            self.parser.symbol() == "{":
            self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
            self.parser.advance()
        else:
            print("inValid sub routine body define!\n")
            exit(1)
        # parse local variable declarations
        while self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
            self.parser.keyWord() == "var":
            self.compileVarDec()
        # parse statements
        self.compileStatements()
        # parse '}'  (note: passes symbol() here instead of currToken();
        # both return the same text for a symbol token)
        if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
            self.parser.symbol() == "}":
            self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
            self.parser.advance()
        else:
            print("inValid sub routine body define!\n")
            exit(1)
        self.upperLevel()
        self.compileOut("</subroutineBody>\n")
        return True
    def compileParameterList(self):
        """Compile ``((type varName) (',' type varName)*)?`` up to (not
        including) the closing ')'.

        Emits the <parameterList> XML node; an unexpected token prints the
        line number and terminates via exit(1).
        """
        self.compileOut("<parameterList>\n")
        self.lowerLevel()
        # loop over parameters until the ')' terminator is reached
        while not (self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and\
            self.parser.symbol() == ")"):
            # parse the parameter type (keyword or class identifier)
            if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER or \
                (self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
                (self.parser.keyWord() == "int" or self.parser.keyWord() == "char" or \
                self.parser.keyWord() == "boolean")):
                self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
                self.parser.advance()
            # parse the parameter name
            if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
                self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
                self.parser.advance()
            # parse ',' separator, or stop at ')'
            if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL:
                if self.parser.symbol() == ",":
                    self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
                    self.parser.advance()
                elif self.parser.symbol() == ")":
                    break
                else:
                    print(str(sys._getframe().f_lineno) + "valid param list!\n")
                    exit(1)
            else:
                print(str(sys._getframe().f_lineno) + "valid param list!\n")
                exit(1)
        self.upperLevel()
        self.compileOut("</parameterList>\n")
        return True
def compileVarDec(self):
self.compileOut("<varDec>\n")
self.lowerLevel()
# parse key word
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "var":
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid val define!\n")
exit(1)
# parse var type
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD or \
self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid val define!\n")
exit(1)
# parse var name
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid val define!\n")
exit(1)
# parse the rest var name
while not (self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ";"):
# parse ","
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ",":
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid val define!\n")
exit(1)
# parse var name
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid val define!\n")
exit(1)
# parse the end symbol
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ";":
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid var define!\n")
exit(1)
# end of class var descrtion
self.upperLevel()
self.compileOut("</varDec>\n")
return True
def compileStatements(self):
self.compileOut("<statements>\n")
self.lowerLevel()
while self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
(self.parser.keyWord() == "do" or \
self.parser.keyWord() == "if" or \
self.parser.keyWord() == "while" or \
self.parser.keyWord() == "let" or \
self.parser.keyWord() == "return"):
if self.parser.keyWord() == "do":
self.compileDo()
elif self.parser.keyWord() == "if":
self.compileIf()
elif self.parser.keyWord() == "while":
self.compileWhile()
elif self.parser.keyWord() == "let":
self.compileLet()
elif self.parser.keyWord() == "return":
self.compileReturn()
else:
print("valid statement define!\n")
exit(1)
self.upperLevel()
self.compileOut("</statements>\n")
return True
def compileDo(self):
self.compileOut("<doStatement>\n")
self.lowerLevel()
# parse do
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "do":
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
else:
print("inValid do define!\n")
exit(1)
# parse '('
while not (self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "("):
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
# parse '('
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "(":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid do statement define!\n")
exit(1)
# parse expression list
self.compileExpressionList()
# parse ')'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ")":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid do statement body define!\n")
exit(1)
# parse ';'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ';':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid do statement define!\n")
exit(1)
self.upperLevel()
self.compileOut("</doStatement>\n")
return True
def compileLet(self):
self.compileOut("<letStatement>\n")
self.lowerLevel()
# parse let
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "let":
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
else:
print("inValid let define!\n")
exit(1)
# parse varname
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
self.compileOutElement(self.parser.tokenType(),self.parser.identifier())
self.parser.advance()
else:
print("inValid let define!\n")
exit(1)
# parse `[expression]`
while self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == '[':
# parse '['
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
# parse expression
self.compileExpression()
# parse ']'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ']':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid let define!\n")
exit(1)
# parse '='
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == '=':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid let define!\n")
exit(1)
# parse expression
self.compileExpression()
# parse ';'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ';':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid let define!\n")
exit(1)
self.upperLevel()
self.compileOut("</letStatement>\n")
return True
def compileWhile(self):
self.compileOut("<whileStatement>\n")
self.lowerLevel()
# parse return
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "while":
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
else:
print("inValid while define!\n")
exit(1)
# parse '('
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == '(':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid while define!\n")
exit(1)
# parse expression
self.compileExpression()
# parse ')'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ')':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid while define!\n")
exit(1)
# parse '{'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == '{':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid while define!\n")
exit(1)
# parse statements
self.compileStatements()
# parse '}'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == '}':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("inValid while define!\n")
exit(1)
self.upperLevel()
self.compileOut("</whileStatement>\n")
return True
def compileReturn(self):
self.compileOut("<returnStatement>\n")
self.lowerLevel()
# parse return
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "return":
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
else:
print("valid if return statement!\n")
exit(1)
# parse expression list
if not (self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ';'):
self.compileExpression()
# parse ';'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ';':
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if return statement!\n")
exit(1)
self.upperLevel()
self.compileOut("</returnStatement>\n")
return True
def compileIf(self):
self.compileOut("<ifStatement>\n")
self.lowerLevel()
# parse if
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "if":
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
# parse '('
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "(":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
# parse expression
self.compileExpression()
# parse ')'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ")":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
# parse '{'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "{":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
# parse statements
self.compileStatements()
# parse '}'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "}":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
# parse else
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD and \
self.parser.keyWord() == "else":
# parse 'else'
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
# parse '{'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "{":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
# parse statements
self.compileStatements()
# parse '}'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == "}":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid if define!\n")
exit(1)
self.upperLevel()
self.compileOut("</ifStatement>\n")
return
def compileExpression(self):
self.compileOut("<expression>\n")
self.lowerLevel()
# parse term
self.compileTerm()
# parse op
while self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
(self.parser.symbol() == "+" or self.parser.symbol() == "-" or \
self.parser.symbol() == "*" or self.parser.symbol() == "/" or \
self.parser.symbol() == "&" or self.parser.symbol() == "|" or \
self.parser.symbol() == ">" or self.parser.symbol() == "<" or \
self.parser.symbol() == "="):
# parse op
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
# parse term
self.compileTerm()
self.upperLevel()
self.compileOut("</expression>\n")
return
def compileTerm(self):
self.compileOut("<term>\n")
self.lowerLevel()
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_INT_CONST:
# parse int const
self.compileOutElement(self.parser.tokenType(),self.parser.intVal())
self.parser.advance()
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_STRING_CONST:
# parse string const
self.compileOutElement(self.parser.tokenType(),self.parser.stringVal())
self.parser.advance()
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_KEYWORD:
# parse keword const
if self.parser.keyWord() == "true" or self.parser.keyWord() == "false" or \
self.parser.keyWord() == "null" or self.parser.keyWord() == "this":
self.compileOutElement(self.parser.tokenType(),self.parser.keyWord())
self.parser.advance()
else:
print("inValid expression define!\n")
exit(1)
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL:
if self.parser.symbol() == "(":
# parse '('
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
# parse expression
self.compileExpression()
# parse ')'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
self.parser.symbol() == ")":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid term define!\n")
exit(1)
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and \
(self.parser.symbol() == "-" or self.parser.symbol() == "~"):
# parse unaryOp
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
# parse term
self.compileTerm()
else:
print("valid term define!\n")
exit(1)
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
# parse subroutineName or varName
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
# parse expression
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == "[":
# parse '['
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
# parse expression
self.compileExpression()
# parse ']'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == "]":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid term define!\n")
exit(1)
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ".":
# parse '.'
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
# parse subroutineName
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_IDENTIFIER:
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid term define!\n")
exit(1)
# parse '('
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == "(":
self.compileOutElement(self.parser.tokenType(),self.parser.symbol())
self.parser.advance()
else:
print("valid term define!\n")
exit(1)
# parse expressList
self.compileExpressionList()
# parse ')'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ")":
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid term define!\n")
exit(1)
elif self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == "(":
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
# parse expressList
self.compileExpressionList()
# parse ')'
if self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL and self.parser.symbol() == ")":
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
else:
print("valid term define!\n")
exit(1)
self.upperLevel()
self.compileOut("</term>\n")
return True
def compileExpressionList(self):
self.compileOut("<expressionList>\n")
self.lowerLevel()
if self.parser.symbol() == ')' and \
self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL:
self.upperLevel()
self.compileOut("</expressionList>\n")
return True
# parse expression
self.compileExpression()
# parse `, expression`
while self.parser.symbol() == ',' and \
self.parser.tokenType() == TOKEN_TYPE.TOKEN_SYMBOL:
# parse ','
self.compileOutElement(self.parser.tokenType(),self.parser.currToken())
self.parser.advance()
# parse expression
self.compileExpression()
self.upperLevel()
self.compileOut("</expressionList>\n")
return
def main(input):
    """Run the analyzer on a single .jack file or every .jack file in a directory.

    ``input`` shadows the builtin of the same name; the parameter name is kept
    for interface compatibility with existing callers.
    """
    if os.path.exists(input):
        if os.path.isdir(input):
            for f in os.listdir(input):
                filename = os.path.join(input, f)
                # endswith() instead of find(): find(".jack") >= 0 also matched
                # names like "foo.jack.bak" or "foo.jackx"
                if filename.endswith(".jack"):
                    JackAnalyzer(filename)
        else:
            JackAnalyzer(input)
    else:
        print("invalid path")
if __name__ == "__main__":
main(sys.argv[1]) | [
"mml1106@126.com"
] | mml1106@126.com |
24477cb63cb6f686691370294340502c0cc654b2 | 10d9756f6c711ae20a0bbcb84f2be1adebb9ed5f | /examples/sparse_jacobi_matrix.py | 2fc5789dbff287ffd673d86521d98f0fd08f1579 | [
"BSD-3-Clause"
] | permissive | paulrozdeba/pyadolc | f44fa5589eeefb4a6b8e5e86625798c40aa529f0 | b3ab24a7181346545697a363ab5a3171d4bb218e | refs/heads/master | 2021-01-01T07:59:13.796652 | 2018-01-21T17:32:52 | 2018-01-21T17:32:52 | 97,529,945 | 0 | 0 | null | 2017-07-17T23:06:38 | 2017-07-17T23:06:37 | null | UTF-8 | Python | false | false | 752 | py | import adolc
import numpy
M,N = 4,2
sparsity_pattern_list = [numpy.random.randint(0,4*N,M)//(3*N) for n in range(N)]
def F(x):
y = numpy.ones(M, dtype=x.dtype)
for n,sp in enumerate(sparsity_pattern_list):
for ns, s in enumerate(sp):
if s == 1:
y[ns] *= x[n]
return y
x = numpy.random.rand(N)
adolc.trace_on(0)
x = adolc.adouble(x)
adolc.independent(x)
y = F(x)
adolc.dependent(y)
adolc.trace_off()
x = numpy.random.rand(N)
y = F(x)
y2 = adolc.function(0,x)
assert numpy.allclose(y,y2)
options = numpy.array([0,0,0,0],dtype=int)
pat = adolc.sparse.jac_pat(0,x,options)
result = adolc.colpack.sparse_jac_no_repeat(0,x,options)
print adolc.jacobian(0,x)
print pat
print result
| [
"sebastian.walter@gmail.com"
] | sebastian.walter@gmail.com |
b3c2d00bbbcb4af4d0d9fc7842cf3e8e929b270b | 659e6db1ae3488fa15f1df551380d8e1b747cfb9 | /model/model.py | b5e337983d3ef2bea60b88444e7a9f66f78e6282 | [] | no_license | wormys/fracture | 17ee9c7216937603c0f6c97eb23da20244001c2a | 39c62201f82d18a144e0507ad74fc96f3604e7c2 | refs/heads/master | 2023-08-24T15:14:49.062061 | 2021-10-18T12:57:54 | 2021-10-18T12:57:54 | 366,652,767 | 0 | 1 | null | 2021-09-26T08:39:04 | 2021-05-12T08:55:13 | Python | UTF-8 | Python | false | false | 2,208 | py | """
Date: 2021/05/10
Author: worith
"""
import torch
import torch.nn.functional as F
class NetX2Y(torch.nn.Module):
    """Feed-forward network mapping input features X to outputs Y.

    When ``is_physical_info`` is true, an externally supplied feature vector
    (set via :meth:`add_physical_info`) is concatenated to the last hidden
    layer before the output projection.
    """
    def __init__(self, hidden1, hidden2, hidden3, physical_hidden, is_physical_info, n_feature, n_output):
        """hidden1..hidden3: hidden layer widths; physical_hidden: width of the
        extra physical-info vector; n_feature/n_output: input/output sizes."""
        super(NetX2Y, self).__init__()
        self.hidden1 = torch.nn.Linear(n_feature, hidden1)  # hidden layer, linear
        self.is_physical_info = is_physical_info
        self.hidden2 = torch.nn.Linear(hidden1, hidden2)  # hidden layer, linear
        self.hidden3 = torch.nn.Linear(hidden2, hidden3)  # hidden layer, linear
        if self.is_physical_info:
            # output layer sized for the concatenated [hidden3 | physical_info] vector
            self.predict = torch.nn.Linear(hidden3 + physical_hidden, n_output)  # output layer, linear
        else:
            self.predict = torch.nn.Linear(hidden3, n_output)
        # self.dropout = torch.nn.Dropout(p=0.5)
    def forward(self, x):
        """Forward pass. NOTE(review): when is_physical_info is set,
        add_physical_info() must have been called first or self.physical_info
        will be missing -- confirm against callers."""
        x = torch.relu(self.hidden1(x))
        x = torch.relu(self.hidden2(x))
        x = torch.relu(self.hidden3(x))
        if self.is_physical_info:
            # concatenate along the feature dimension (dim=1)
            x = torch.cat([x, self.physical_info], 1)
        x = self.predict(x)  # output value
        return x
    def add_physical_info(self, physical_info):
        """Store the physical-info tensor used by forward() when enabled."""
        self.physical_info = physical_info
class NetH2Y(torch.nn.Module):
    """Feed-forward network mapping inputs H to outputs Y.

    In training mode ``forward`` returns only the prediction; in eval mode it
    also returns the last hidden activation ("physical info") so it can be fed
    into a companion model.
    """
    def __init__(self, hidden1, hidden2, hidden3, hidden4, n_feature, n_output):
        """hidden1..hidden4: hidden layer widths; n_feature/n_output: input/output sizes."""
        super(NetH2Y, self).__init__()
        self.hidden1 = torch.nn.Linear(n_feature, hidden1)  # hidden layer, linear
        self.hidden2 = torch.nn.Linear(hidden1, hidden2)  # hidden layer, linear
        self.hidden3 = torch.nn.Linear(hidden2, hidden3)  # hidden layer, linear
        self.hidden4 = torch.nn.Linear(hidden3, hidden4)  # hidden layer, linear
        self.predict = torch.nn.Linear(hidden4, n_output)  # output layer, linear
        # self.dropout = torch.nn.Dropout(p=0.5)
    def forward(self, x):
        """Forward pass; return value depends on self.training (see class docstring)."""
        x = torch.relu(self.hidden1(x))
        x = torch.relu(self.hidden2(x))
        x = torch.relu(self.hidden3(x))
        x = torch.relu(self.hidden4(x))
        # capture the final hidden representation before the output projection
        physical_info = x
        x = self.predict(x)  # output value
        if self.training:
            return x
        else:
            return x, physical_info
| [
"2589978839@qq.com"
] | 2589978839@qq.com |
ef36daca0df48a857bb2a548b85be845df066979 | 1b298205e6164314d5d2d41ef1fa4986494b24e2 | /viterbi.py | a87ac08ce85b47b4f4e208f7a5eefab054d4d5a0 | [] | no_license | ywng/pos-tagger | 8c925fb7e07423e1bc16a3c3ed648d350ee6e44f | 620c4fd8a91f2f3e96479bd6d58ad705d4d6095b | refs/heads/master | 2020-04-27T08:46:29.592314 | 2019-03-08T03:00:53 | 2019-03-08T03:00:53 | 174,185,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,050 | py | from __future__ import division
from const import START_END_OBS, START_END_TAG
import math
class Viterbi:
    """Viterbi decoder for a hidden Markov model (log-space dynamic program)."""
    def __init__(self, obs_space, states, trans_prob, emit_prob):
        '''
        obs_space: all the possible values of observations. In POS tagging, it is the vocabulary.
        states: the HMM states. In POS tagging, it is the Part of Speech tag.
        trans_prob: A matrix K by K, trans_prob[i][j] is prob of state i -> state j,
                    where K is the size of the states.
        emit_prob: A matrix K by N, emit_prob[i][j] is the prob of observing obs_j given state i,
                   where K is the size of the states, N is the size of the obs_space.
        '''
        self.obs_space, self.states, self.trans_prob, self.emit_prob = \
            obs_space, states, trans_prob, emit_prob
        self.obs_space_size = len(self.obs_space)
        self.states_size = len(self.states)
        # observation -> index lookup table (avoids O(n) list.index per token)
        self.indexOfObs = {}
        for idx, observation in enumerate(self.obs_space):
            self.indexOfObs[observation] = idx

    def decode(self, obs):
        '''
        obs: a sequence of observations. In POS tagging, it is a sequence of words/tokens.
        Returns (best state sequence, log-probability of that sequence).
        '''
        seq_len = len(obs)
        viterbi = [[0] * seq_len for i in range(self.states_size)]
        backptr = [[None] * seq_len for i in range(self.states_size)]
        # initialize from the START_END_TAG pseudo-state
        start_tag_idx = self.states.index(START_END_TAG)
        first_obs_idx = self.indexOfObs[obs[0]]
        for s in range(self.states_size):
            backptr[s][0] = 0
            if self.trans_prob[start_tag_idx][s] == 0:
                viterbi[s][0] = float("-inf")
            else:
                viterbi[s][0] = math.log(self.trans_prob[start_tag_idx][s]) + \
                                math.log(self.emit_prob[s][first_obs_idx])
        # NOTE(review): the recursion below assumes trans_prob/emit_prob entries
        # are non-zero (math.log(0) raises) -- presumably guaranteed by smoothing.
        for t in range(1, seq_len):
            obs_t_idx = self.indexOfObs[obs[t]]
            for curr_s in range(self.states_size):
                max_path, max_prob = None, float("-inf")
                for prev_s in range(self.states_size):
                    prob = viterbi[prev_s][t-1] + \
                           math.log(self.trans_prob[prev_s][curr_s]) + \
                           math.log(self.emit_prob[curr_s][obs_t_idx])
                    if prob > max_prob:
                        max_path, max_prob = prev_s, prob
                viterbi[curr_s][t] = max_prob
                backptr[curr_s][t] = max_path
        # trace backward to recover the best state sequence
        state_seq = [None] * seq_len
        state_idx_seq = [None] * seq_len
        # BUG FIX: start the argmax from state 0 explicitly. Previously
        # state_idx_seq[seq_len-1] was only assigned inside the loop's
        # if-branch, so it stayed None (crashing self.states[None] below)
        # whenever state 0 held the best final probability.
        max_prob = viterbi[0][seq_len-1]
        state_idx_seq[seq_len-1] = 0
        for state_idx in range(1, self.states_size):
            if viterbi[state_idx][seq_len-1] > max_prob:
                max_prob = viterbi[state_idx][seq_len-1]
                state_idx_seq[seq_len-1] = state_idx
        state_seq[seq_len-1] = self.states[state_idx_seq[seq_len-1]]  # actual tag
        for t in range(seq_len-1, 0, -1):
            state_idx_seq[t-1] = backptr[state_idx_seq[t]][t]
            state_seq[t-1] = self.states[state_idx_seq[t-1]]
        return state_seq, max_prob
| [
"victor.not.to.yield@gmail.com"
] | victor.not.to.yield@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.