blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eafde55b4128f1e79475991b307f8e91de418586 | 8a283e52a9180b81356e68dd35b1293c3d32aa5c | /task9.py | 505435e338a98373032a0ff4dc1235507216c43b | [] | no_license | irishabharya/k8swebapp | d4458421c9dd7965302533e98817011616d03ae2 | b5822e9b0b39bae1b184db32151b279e4ed8abb1 | refs/heads/main | 2023-06-29T08:10:53.279283 | 2021-08-11T18:04:02 | 2021-08-11T18:04:02 | 395,074,927 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,369 | py | #!/usr/bin/python3
print("content-type: text/html")
print()
import cgi
import subprocess
f = cgi.FieldStorage()
cmd = f.getvalue("x")
pod = f.getvalue("pod")
port = f.getvalue("port")
replica = f.getvalue("replica")
# Dispatch kubectl actions based on keywords in the "x" form field.
# Guard first: getvalue() returns None when the field is absent, and the
# substring tests below would raise TypeError on None.
if not cmd:
    print("Please Enter valid input")
# The more specific "all"+"deployment" case must be tested before the bare
# "all" case; in the original order this branch was unreachable.
elif ("all" in cmd) and ("deployment" in cmd):
    print(subprocess.getoutput("kubectl get deployment --kubeconfig admin.conf"))
elif ("all" in cmd):
    print(subprocess.getoutput("sudo kubectl get pods --kubeconfig admin.conf"))
elif ("deployment" in cmd) and ("create" in cmd):
    print(subprocess.getoutput("kubectl create deployment {} --image=httpd --kubeconfig admin.conf ".format(pod)))
elif ("deployment" in cmd) and ("expose" in cmd):
    print(subprocess.getoutput("kubectl expose deployment {} --port={} --type=NodePort --kubeconfig admin.conf ".format(pod, port)))
# Parentheses added: "and" binds tighter than "or", so the original
# condition evaluated as (create) or (scale and (replica or deployment)).
elif (("create" in cmd) or ("scale" in cmd)) and (("replica" in cmd) or ("deployment" in cmd)):
    print(subprocess.getoutput("kubectl scale deployment {} --replicas={} --kubeconfig admin.conf ".format(pod, replica)))
elif ("delete" in cmd) and ("pod" in cmd):
    print(subprocess.getoutput("kubectl delete pods {} --kubeconfig admin.conf".format(pod)))
elif ("delete" in cmd) and ("deployment" in cmd):
    print(subprocess.getoutput("kubectl delete deployment {} --kubeconfig admin.conf".format(pod)))
else:
    print("Please Enter valid input")
| [
"noreply@github.com"
] | noreply@github.com |
8a32ce25bfea1cadc9dbf675455e80e62c2842db | 54d81ffa9dc1faacb0a2cf0baa9a334bc99faa23 | /day00/ex02/TinyStatistician.py | 1e72ff0de7481fcb678d1b6099fd067cb94e55f7 | [] | no_license | AdrianWR/MachineLearningBootcamp | 94bee5cd1ec30c62390ccd3c4b679223dd011174 | b30f717aaceca02f9fbb273b607a0ec496e432a0 | refs/heads/master | 2022-12-22T17:38:16.883083 | 2020-09-26T15:40:20 | 2020-09-26T15:40:20 | 259,935,134 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | import math
class TinyStatistician:
    """Minimal descriptive-statistics helpers for numeric sequences.

    Every public method returns None when given an empty sequence.
    """

    @staticmethod
    def mean(x):
        """Arithmetic mean of x, or None when x is empty."""
        if not x:
            return None
        return sum(x) / len(x)

    @staticmethod
    def median(x):
        """Median of x (the 50th percentile), or None when x is empty."""
        if not x:
            return None
        return TinyStatistician.quartile(x, 50)

    @staticmethod
    def quartile(x, percentile):
        """Value of x at the given percentile, or None when x is empty.

        When the computed rank lands exactly on an integer index, the
        result is the average of the two neighbouring order statistics;
        otherwise the element at the floored rank is returned.
        """
        if not x:
            return None
        ordered = TinyStatistician.__sort(x)
        count = len(ordered)
        rank = percentile * count / 100
        lower = math.floor(rank)
        if rank == lower:
            return float((ordered[lower] + ordered[lower - 1]) / 2)
        return float(ordered[lower])

    @staticmethod
    def var(x):
        """Population variance of x, or None when x is empty."""
        count = len(x)
        if not count:
            return None
        mu = TinyStatistician.mean(x)
        return sum((value - mu) ** 2 for value in x) / count

    @staticmethod
    def std(x):
        """Population standard deviation of x, or None when x is empty."""
        if not x:
            return None
        return math.sqrt(TinyStatistician.var(x))

    @staticmethod
    def __sort(array):
        """Return a sorted copy of array, using insertion sort."""
        result = list(array)
        for pos in range(1, len(result)):
            current = result[pos]
            k = pos - 1
            while k >= 0 and result[k] > current:
                result[k + 1] = result[k]
                k -= 1
            result[k + 1] = current
        return result
| [
"adrian.w.roque@gmail.com"
] | adrian.w.roque@gmail.com |
32bbdc3dfa362fb7cf6b2b7a7e8c026eae0eb5f1 | 9a40c85c55d75327d82a6e2010d58faf6aaeff49 | /Website/mysite/settings.py | 91fc5443013ba838a30d491bdefcace58d23f46c | [] | no_license | MostafaHamedAbdelmasoud/Liver-cancer-detection-in-CT-scans | 8b1dab64cb1273d9dae8709435ac05e38ac26e84 | 5a6c3eb4d84ce1badb3a7103c06dc336caab3073 | refs/heads/master | 2022-12-25T00:54:47.344764 | 2020-09-20T14:31:02 | 2020-09-20T14:31:02 | 295,721,623 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,159 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Per-app template directories, resolved relative to the project root.
TEMPLATE_DIR = os.path.join(BASE_DIR, 'basicapp/templates/basicapp')
TEMPLATE_DIR_USERS = os.path.join(BASE_DIR, 'users/templates/users')
TEMPLATE_DIR_DEEPMODEL = os.path.join(BASE_DIR, 'deepmodel/templates/deepmodel')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): the key below is hardcoded in source control; switch to the
# commented environment-variable form before any production deployment.
SECRET_KEY = '8lu*6g0lg)9z!ba+a$ehk)xt)x%rxgb$i1&022shmi1jcgihb*'
#SECRET_KEY = os.environ.get("SECRET_KEY")
# NOTE(review): DEBUG=True must not be used in production.
DEBUG = True
#DEBUG = int(os.environ.get("DEBUG", default=0))
# 'DJANGO_ALLOWED_HOSTS' should be a single string of hosts with a space between each.
# For example: 'DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]'
#ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS").split(" ")
# NOTE(review): '*' accepts any Host header -- restrict in production.
ALLOWED_HOSTS = '*'
#ALLOWED_HOSTS = ['app']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'basicapp',
    'django_dicom',
    'crispy_forms',
    'users',
    'deepmodel',
    'debug_toolbar',
    'dicom'
]
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# NOTE(review): django.middleware.csrf.CsrfViewMiddleware is absent from
# this list -- confirm CSRF protection is intentionally disabled.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware'
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_DIR, TEMPLATE_DIR_USERS, TEMPLATE_DIR_DEEPMODEL, ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
# NOTE(review): database credentials are hardcoded here -- move to
# environment variables for production use.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'postgres',
        'USER': 'shaker',
        'PASSWORD': 'a',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
#         'NAME': 'sqlite3.db',                   # Or path to database file if using sqlite3.
#         'USER': '',                             # Not used with sqlite3.
#         'PASSWORD': '',                         # Not used with sqlite3.
#         'HOST': '',                             # Set to empty string for localhost. Not used with sqlite3.
#         'PORT': '',                             # Set to empty string for default. Not used with sqlite3.
#     }
# }
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
APP_DIR = os.path.join(BASE_DIR, 'deepmodel')
STATICFILES_DIRS = [
    os.path.join(APP_DIR, "static"),
    os.path.join(APP_DIR, "static/DICOM"),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
# Custom user model and auth redirect targets.
AUTH_USER_MODEL = 'users.CustomUser'
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
LOGIN_URL = 'login'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Hosts allowed to see the debug toolbar.
INTERNAL_IPS = ['127.0.0.1']
# Activate Django-Heroku.
| [
"mostafa.hamed1944@gmail.com"
] | mostafa.hamed1944@gmail.com |
e73b79844c44e046ec48b55aa0ab8513b9544a39 | 91aafc8666d650526576739efcc8322865693882 | /sdd/sdd5/sbin/rfut.py | 866440a5fb8e28638a93227e1a7e8ae28cb985a4 | [] | no_license | akkmzack/RIOS-8.5 | 186d95b69307ab9b926777715d15ccfefe914962 | 5c0a21a8acf0d9baaa0d837a33ca5d8230ef2ee5 | refs/heads/master | 2021-01-10T14:57:03.254387 | 2016-01-26T06:26:05 | 2016-01-26T06:26:05 | 50,402,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,554 | py | #!/usr/bin/env python
from lxml import etree
import os
from os.path import exists
import sys
import subprocess
from subprocess import Popen
from subprocess import PIPE
from optparse import OptionParser
import syslog
import time
import fileinput
import datetime
DBG_NONE = 0
DBG_ERR = 1
DBG_WAR = 2
DBG_INF = 3
DBG_VBS = 4
DEFAULT_DBG_LEVEL = 1
CURRENT_DBG_LEVEL = DEFAULT_DBG_LEVEL
SAVED_PATH = None
SYSLOG_ENABLE = False
class Upgrade_result:
    """Enumerates FWItem.upgrade() outcomes.

    Values are ordered so that callers can test success with
    `ret >= Upgrade_result.Succeed_no_need_upgrade` (see FWTransaction.do_it).
    """
    Failed_no_file, Fail_cmds, Succeed_no_need_upgrade, Succeed_upgrade = range(4)
###############################################################################
#
# printd
#
# print out the messages if the debug level is high enough
#
###############################################################################
def printd(dbg_level, msg):
global CURRENT_DBG_LEVEL
if (dbg_level <= CURRENT_DBG_LEVEL):
now = datetime.datetime.now()
print "rfut: " + str(now) + " " + msg
###############################################################################
#
# run_cd_cmd
#
# execute a linux cd command.
#
###############################################################################
def run_cd_cmd(cmdline):
    """Handle a 'cd ' command from the spec file in-process.

    'cd -' returns to the directory saved in the module-level SAVED_PATH;
    any other target saves the current directory first and then changes
    into the target.  cmdline is assumed to start with 'cd ', so the
    target begins at index 3.  Failures are logged, not raised.
    """
    global SAVED_PATH
    if cmdline[3] == '-':
        if SAVED_PATH is not None: # switch back to previous path
            try:
                os.chdir(SAVED_PATH)
            except Exception, err:
                printd(DBG_WAR, "failed to switch to the old path. Error: %s" %str(err))
            return
        else: #no previous path. the cd command is cding to the working directory
            return
    try:
        # Remember where we were so a later 'cd -' can return here.
        SAVED_PATH = os.getcwd()
        printd(DBG_VBS, " new dir is " + cmdline[3:])
        os.chdir(cmdline[3:])
    except Exception, err:
        printd(DBG_WAR, "failed to switch to another path. Error: %s" %str(err))
###############################################################################
#
# run_reboot_cmd
#
# handle reboot command
#
###############################################################################
def run_reboot_cmd():
    """Signal a requested reboot by raising RebootException for the caller."""
    raise RebootException
###############################################################################
#
# run_shell_cmd
#
# execute a linux shell command.
#
###############################################################################
def run_shell_cmd(cmdline):
    """Execute one shell command line and return its stdout.

    'cd ' lines are handled in-process via run_cd_cmd (they change this
    script's working directory); lines containing 'reboot' or 'shutdown -r'
    are routed to run_reboot_cmd, which raises RebootException.  Raises
    Exception when the command exits non-zero or writes anything to stderr.
    """
    # Special handling for the "cd" command.  startswith() replaces the
    # original character-by-character check, which raised IndexError on
    # command lines shorter than three characters (e.g. a bare "cd").
    if cmdline.startswith("cd "):
        printd(DBG_VBS, " got a cd command")
        run_cd_cmd(cmdline)
        return
    if ( "reboot" in cmdline or "shutdown -r" in cmdline):
        run_reboot_cmd()
    pf = Popen(cmdline, shell=True, stdout=PIPE, stderr=PIPE)
    (output, errout) = pf.communicate()
    printd(DBG_VBS, "out is " + output)
    printd(DBG_VBS, "err is " + errout)
    if (pf.wait() == 0 and len(errout) == 0):
        return output
    else:
        raise Exception('Unable to run cmd %s, got following error %s %s' %
                        (cmdline, output, errout))
###############################################################################
#
# check_file
#
# check if a file exist and accessible. Return True if file exists.
#
###############################################################################
def check_file( path, report_warn=True ):
    """Return True when *path* names an existing, readable regular file.

    When the check fails and report_warn is set, an informational message
    is logged via printd.
    """
    readable = (os.path.exists(path)
                and os.path.isfile(path)
                and os.access(path, os.R_OK))
    if readable:
        printd(DBG_VBS, path + " exists and is readable")
        return True
    if report_warn:
        printd(DBG_INF, "Either file is missing or is not readable, file: " + path )
    return False
###############################################################################
#
# compare_with_major_minor
#
# Split version strings to Major and minor number and compare them
# the format of the version string should be like this "2.10" or "2.4"
# return false if the current version is >= the expected version
# Otherwise, return true
#
###############################################################################
def compare_with_major_minor(expected_ver_str, cur_ver_str):
    """Numerically compare two "MAJOR.MINOR" version strings.

    Return False when the current version is greater than or equal to the
    expected one (no upgrade needed); return True otherwise.
    """
    exp_major, exp_minor = (int(part) for part in expected_ver_str.split('.'))
    cur_major, cur_minor = (int(part) for part in cur_ver_str.split('.'))
    # Lexicographic tuple comparison encodes the major-then-minor ordering.
    return (cur_major, cur_minor) < (exp_major, exp_minor)
###############################################################################
#
# RebootException
#
# Special reboot exception
#
###############################################################################
class RebootException(Exception):
    """Raised when an upgrade step requires the system to reboot."""
    pass
###############################################################################
#
# TeeLog
#
# New class to overload "print" statements to go to screen and logfile
# When this class is used, either -l option or -s option must be specified
#
###############################################################################
class TeeLog:
    """File-like stdout replacement that mirrors writes to a log sink.

    When the module-level SYSLOG_ENABLE flag is set, text goes to syslog
    (LOG_ERR) and no file is opened; otherwise writes are duplicated to the
    real stdout and to *filename*, like the Unix tee command.  Either the
    -l or -s command-line option must be in effect when this class is used.
    """
    def __init__(self, stdout, filename):
        global SYSLOG_ENABLE
        if ( not SYSLOG_ENABLE ) :
            # Tee mode: keep the real stdout and open the log file.
            self.stdout = stdout
            self.logfile = open(filename, 'w')
    def write(self, text):
        global SYSLOG_ENABLE
        if ( SYSLOG_ENABLE ) :
            syslog.syslog(syslog.LOG_ERR, text)
        else:
            self.stdout.write(text)
            self.logfile.write(text)
    def close(self):
        global SYSLOG_ENABLE
        if ( SYSLOG_ENABLE ) :
            syslog.closelog()
        else:
            self.stdout.close()
            self.logfile.close()
    def flush(self):
        # Bug fix: the original flushed self.logfile when SYSLOG_ENABLE was
        # set, but in syslog mode __init__ never creates self.logfile, so
        # any flush (e.g. the interpreter's exit-time flush of sys.stdout)
        # raised AttributeError.  Syslog needs no flushing; in tee mode we
        # now flush both underlying streams.
        global SYSLOG_ENABLE
        if ( not SYSLOG_ENABLE ) :
            self.stdout.flush()
            self.logfile.flush()
###############################################################################
#
# Binary
#
# Check if a binary exist in the PATH. Return the path or None.
#
###############################################################################
class Binary:
    """Helper that locates an executable on the PATH via the `which` command."""
    def __init__(self, program):
        # program: name of the executable to look up
        self.program = program
    def whereis(self):
        """Return the output of `which <program>`.

        Propagates the exception from run_shell_cmd when the lookup
        command fails (non-zero exit or stderr output).
        """
        return run_shell_cmd("which " + self.program)
###############################################################################
#
# Action
#
# Base class for all real actions.
#
###############################################################################
class Action:
    """Interface for actions driven by the firmware spec file.

    Concrete subclasses in this module are FWTransaction and CmdAction;
    the base implementations are deliberate no-ops.
    """
    def add_action_name(self, action_name):
        pass
    def add_action(self, real_action):
        pass
    def do_it(self):
        pass
    def get_cur_fw_ver(self):
        pass
###############################################################################
#
# FWItem
#
# Class for each individual firmware upgrade item, such as BIOS, or BMC
#
###############################################################################
class FWItem:
    """One upgradable firmware component (e.g. BIOS, BMC, LSI controller).

    Only name and the current-version fields are set here; category,
    expected_ver_str, expected_img, newer_ok, upgrade_cmd, upgrade_cmd_opt
    and the fallback_* attributes are assigned externally by
    populate_FW_item() after construction.
    """
    def __init__(self, name):
        self.name = name
        self.cur_ver_nr = 0          # numeric form of the version, when parseable
        self.cur_ver_str = ""        # raw version string from the query command
        self.cur_ver_cmd_list = []   # candidate shell commands to query the version
    def get_cur_ver(self):
        """Run the candidate version-query commands until one yields a version.

        Sets cur_ver_str, and cur_ver_nr when the string parses as a float.
        A non-numeric version is tolerated only for LSI components whose
        version string is 32 characters long (an md5 hash); otherwise an
        Exception is raised.
        """
        for cmd in self.cur_ver_cmd_list:
            cmd_output = run_shell_cmd( cmd )
            self.cur_ver_str = cmd_output.strip('\n')
            printd(DBG_VBS, "%s : cur ver is %s" %(self.category, self.cur_ver_str))
            try:
                cur_ver_nr = float(self.cur_ver_str)
            except ValueError:
                # Not numeric -- try the next candidate command, if any.
                printd(DBG_VBS, "%s, cannot convert to fload, must be md5 hash." %(self.category))
                continue
            else:
                self.cur_ver_nr = cur_ver_nr
                return
        #It is OK that cur_ver_str cannot be convert to float for LSI components
        if ("LSI" in self.category and len(self.cur_ver_str) == 32):
            return
        else:
            raise Exception ("%s, wrong format for version string" %self.category)
    def need_upgrade( self ):
        #compare two version strings. Return true if upgrading is needed. Otherwsie, return false
        printd(DBG_INF, "%s : cur ver %s, expected ver %s" %(self.category,
               self.cur_ver_str, self.expected_ver_str))
        if ( self.expected_ver_str == self.cur_ver_str ):
            return False
        try:
            expected_ver_nr = float(self.expected_ver_str)
        except ValueError:
            # Non-numeric expected version (hash): any mismatch means upgrade.
            printd(DBG_VBS, "%s, cannot convert to float, must be md5 hash." %(self.category))
            return True
        else:
            printd(DBG_VBS, "%s : cur ver %f, expected ver %f" %(self.category,
                   float(self.cur_ver_nr), float(self.expected_ver_str)))
            if ( expected_ver_nr == self.cur_ver_nr ):
                return False
            if (self.newer_ok == "yes" ):
                # A currently-newer version is acceptable: skip the upgrade.
                if ("BMC" in self.category): #need to splict to major and minor versions
                    return compare_with_major_minor(self.expected_ver_str, self.cur_ver_str)
                else :
                    if (self.cur_ver_nr >= expected_ver_nr ):
                        return False
            return True
    def upgrade(self, retry_times):
        """upgrade the firmware
        return value: Upgrade_result.Failed_no_file: upgrade failed because files do not exist
        return value: Upgrade_result.Fail_cmds: upgrade failed because cmds failed
        return value: Upgrade_result.Succeed_no_need_upgrade: no need to upgrade
        return value: Upgrade_result.Succeed_upgrade : upgrade succeed ..."""
        printd(DBG_VBS, "in upgrade")
        if ( not self.need_upgrade() ):
            printd(DBG_INF, "No need to upgrade, cur ver %s" %self.cur_ver_str)
            return Upgrade_result.Succeed_no_need_upgrade
        printd(DBG_VBS, "Need to upgrade")
        if ( not (check_file(self.expected_img) ) ):
            printd(DBG_ERR, "Error: cannot find expected image file " + self.expected_img)
            return Upgrade_result.Failed_no_file
        # Sanity check: the command options must actually reference the image.
        if ( not ( self.expected_img in self.upgrade_cmd_opt ) ):
            printd(DBG_ERR, "Error: Expected image file not in the command line option")
            return Upgrade_result.Failed_no_file
        # One initial attempt plus retry_times retries.
        for i in range(int(retry_times) + 1) :
            try:
                full_cmd = self.upgrade_cmd + self.upgrade_cmd_opt
                printd(DBG_VBS, "Need to upgrade, cmd " + full_cmd)
                printd(DBG_INF, "upgrading %s... this may take several minutes, please wait." %self.category)
                run_shell_cmd(full_cmd)
            except Exception, err:
                printd(DBG_WAR, "upgrade %s failed, cmd %s, error %s" %(self.category, full_cmd, err))
            else:
                printd(DBG_INF, "upgrade %s succeed" %self.category )
                return Upgrade_result.Succeed_upgrade
        return Upgrade_result.Fail_cmds
    def fall_back(self):
        """Flash the saved fallback image; return True on success.

        Returns False immediately when the fallback image file is missing,
        or when the flash command fails.
        """
        if ( not check_file(self.fallback_img) ):
            return False
        printd(DBG_INF, "%s : cur ver %s, fall back ver %s" %(self.category,
               self.cur_ver_str, self.fallback_ver_str))
        try:
            fallback_ver_nr = float(self.fallback_ver_str)
        except ValueError:
            printd(DBG_VBS, "%s, cannot convert fall back ver to float, must be md5 hash." %(self.category))
        else:
            printd(DBG_VBS, "%s : cur ver %f, fall back ver %f" %(self.category,
                   float(self.cur_ver_nr), float(self.fallback_ver_str)))
            if ( (self.fallback_ver_str != self.cur_ver_str) and (fallback_ver_nr != self.cur_ver_nr) ):
                # Only a warning -- the flash below proceeds regardless.
                printd(DBG_WAR, "%s : cur ver %s is different from fall back ver %s" %(self.category,
                       self.cur_ver_str, self.fallback_ver_str))
        try:
            printd(DBG_INF, "falling back to old image %s... this may take several minutes, please wait." %self.category)
            run_shell_cmd(self.upgrade_cmd + self.fallback_cmd_opt)
        except Exception, err:
            printd(DBG_ERR, "Error: %s: failed to fall back image %s, err msg %s" %(self.category, self.fallback_img, err))
            return False
        else:
            printd(DBG_INF, "%s: fallback to old image %s finished" %(self.category,self.fallback_img))
            return True
    def to_string(self):
        """Render this item's fields as a multi-line debug string."""
        tmpstr = ( "\n" + "name " + self.name + "\n" +
                   "cur_ver " + self.cur_ver_str + "\n" +
                   "expected_ver_str " + str(self.expected_ver_str) + "\n"+
                   "expected_img " + self.expected_img + "\n"+
                   "upgrade_cmd " + self.upgrade_cmd + "\n"+
                   "upgrade_cmd_opt " + self.upgrade_cmd_opt + "\n"+
                   "fallback_img " + self.fallback_img + "\n"+
                   "fallback_cmd_opt " + self.fallback_cmd_opt )
        for cc in self.cur_ver_cmd_list:
            tmpstr += cc
            tmpstr += "\n"
        return tmpstr
###############################################################################
#
# FWTransaction
#
# Class for firmware transactions. Each transaction may contain mulitple FWItem
# For example, LSI controller BIOS and FW could be in one transaction
#
###############################################################################
class FWTransaction(Action):
    """A group of FWItem upgrades applied together, with retry/fallback/alarm.

    Policy attributes (retry_times, trigger_alarm, if_fall_back,
    need_reboot, reboot_now) are set by populate_transaction();
    action_list holds the FWItem objects resolved from action_name_list.
    """
    def __init__(self, name):
        self.name = name
        self.action_name_list = []
        self.action_list = []
        self.post_action_name_list = []
        self.post_action_list = []
        self.changed = False       # becomes True once any item is (re)flashed
        self.need_reboot = "no"
        self.reboot_now = "no"
    def add_action_name(self, action_name):
        self.action_name_list.append(action_name)
    def add_action(self, real_action):
        self.action_list.append(real_action)
    def add_post_action_name(self, action_name):
        self.post_action_name_list.append(action_name)
    def add_post_action(self, real_action):
        self.post_action_list.append(real_action)
    def do_it(self):
        """Upgrade every item; on failure optionally fall back and alarm.

        On a fallback or upgrade failure (when trigger_alarm is "yes") a
        message is written to G_alarm_file_path.  When anything changed and
        need_reboot is "yes", either RebootException is raised immediately
        (reboot_now == "yes") or G_NEED_REBOOT is set for later.  Post
        actions run only when something changed.
        """
        printd(DBG_VBS, "tr: in do_it")
        finished = 0          # count of items upgraded or already up to date
        failed = False
        fall_back_failed = False
        global G_alarm_file_path
        ret_list = []
        for item in self.action_list:
            try:
                item.get_cur_ver()
            except Exception, err:
                printd(DBG_ERR, "Failed to get current version, error %s" %err)
                return
            ret = item.upgrade(self.retry_times)
            if ( ret >= Upgrade_result.Succeed_no_need_upgrade):
                finished += 1
                if ( ret == Upgrade_result.Succeed_upgrade) :
                    self.changed = True
            else:
                failed = True
                break
        if ( failed and (self.if_fall_back == "yes")):
            # Roll back every item that was touched (indices 0..finished).
            i = 0
            for item in self.action_list:
                if ( finished == 0 and (ret == Upgrade_result.Failed_no_file) ):
                    # no need to fall back in this case
                    break;
                if i <= finished :
                    ret = item.fall_back()
                    if ( not ret ):
                        fall_back_failed = True
                    else:
                        self.changed = True
                    i += 1
                else:
                    break
        if ( fall_back_failed and self.trigger_alarm == "yes" ):
            # Record the more severe failure: fallback itself failed.
            try:
                f = open(G_alarm_file_path, 'w')
            except Exception, err:
                printd(DBG_ERR, "Failed to open file %s, err %s" %(G_alarm_file_path, err))
            else:
                try:
                    f.write(self.name)
                    f.write(" failed to fall_back to the old image\n")
                except Exception, err:
                    printd(DBG_ERR, "Failed to write to file %s, err %s" %(G_alarm_file_path, err))
                finally:
                    f.close()
        elif ( failed and self.trigger_alarm == "yes" ):
            # Upgrade failed; fallback (if configured) succeeded.
            try:
                f = open(G_alarm_file_path, 'w')
            except Exception, err:
                printd(DBG_ERR, "Failed to open file %s, err %s" %(G_alarm_file_path, err))
            else:
                try:
                    f.write(self.name)
                    f.write(" failed to upgrade to new image, fell back to old image\n")
                except Exception, err:
                    printd(DBG_ERR, "Failed to write to file %s, err %s" %(G_alarm_file_path, err))
                finally:
                    f.close()
        if ( self.changed and self.need_reboot == "yes" ):
            if ( self.reboot_now == "yes" ):
                raise RebootException
            else:
                global G_NEED_REBOOT
                G_NEED_REBOOT = True
        if ( self.changed ):
            for item in self.post_action_list:
                item.do_it()
    def get_cur_fw_ver(self):
        """Print the current firmware version of every item (no upgrading)."""
        printd(DBG_VBS, "in fwtrancation.get_cur_fw_ver")
        for item in self.action_list:
            item.get_cur_ver()
            print("rfut: %s : cur ver is %s" %(item.category, item.cur_ver_str))
    def to_string(self):
        """Render the transaction's policy fields as a debug string."""
        tmpstr = ( "\n" + "name " + self.name + "\n" +
                   "action_name_list " + str(self.action_name_list) + "\n" +
                   "retry_times " + str(self.retry_times) + "\n" +
                   "trigger_alarm " + self.trigger_alarm + "\n" +
                   "if_fall_back " + self.if_fall_back + "\n" )
        return tmpstr
###############################################################################
#
# CmdItem
#
# Class for each general shell cmds, such as cp rom files etc.
#
###############################################################################
class CmdItem:
    """A named group of shell command lines executed sequentially."""
    def __init__(self, name):
        self.name = name       # identifier from the spec file
        self.cmd_list = []     # shell command lines, run in insertion order
    def add_cmd(self, cmd):
        """Queue one shell command line."""
        self.cmd_list.append(cmd)
    def do_it(self):
        """Run every queued command, in order."""
        printd(DBG_VBS, "cmd: in do_it")
        for command in self.cmd_list:
            run_shell_cmd(command)
    def to_string(self):
        """Render the item's name and command list as a debug string."""
        pieces = ["\n" + "name " + self.name + "\n"]
        for command in self.cmd_list:
            pieces.append(command + "\n")
        return "".join(pieces)
###############################################################################
#
# CmdAction
#
# Class for general shell cmd actions, each action may contain mulitple cmds
#
###############################################################################
class CmdAction(Action):
    """An Action whose work items are CmdItem command groups."""
    def __init__(self, name):
        self.name = name
        self.action_name_list = []
        self.action_list = []
        self.post_action_name_list = []
        self.post_action_list = []
    def add_action_name(self, action_name):
        self.action_name_list.append(action_name)
    def add_action(self, real_action):
        self.action_list.append(real_action)
    def do_it(self):
        """Execute every attached item in order."""
        printd(DBG_VBS, "in cmd.do_it()")
        for item in self.action_list:
            item.do_it()
    def to_string(self):
        """Render the action's name and item-name list as a debug string."""
        tmpstr = ( "\n" + "name " + self.name + "\n"
                 + str(self.action_name_list) + "\n")
        return tmpstr
###############################################################################
#
# get_motherboard
#
# call into hwtool to find out what platform we're running on.
#
###############################################################################
def get_motherboard():
    """Query hwtool for the platform; return (motherboard, mobo-type) strings.

    Raises Exception when the hwtool executable is missing, or propagates
    the run_shell_cmd exception when a query fails.
    """
    cmdline1 = '/opt/tms/bin/hwtool -q motherboard'
    cmdline2 = '/opt/tms/bin/hwtool -q mobo-type'
    if ( not check_file("/opt/tms/bin/hwtool") ):
        raise Exception ('Unable to find hwtool executable')
    out1 = run_shell_cmd(cmdline1)
    out2 = run_shell_cmd(cmdline2)
    # Strip trailing newlines from the command output before returning.
    return (out1.strip('\n'), out2.strip('\n'))
###############################################################################
#
# populate_cmd_item
#
# set CmdItem members as specified in the xml file. Don't run any cmd here.
#
###############################################################################
def populate_cmd_item(element):
    """Build a CmdItem from its spec-file XML element.

    Only collects the command lines from "one_cmd" children; nothing is
    executed here.
    """
    item = CmdItem(element.attrib["name"])
    for child in element:
        if child.tag != "one_cmd":
            continue
        item.cmd_list.append(child.attrib["cmd"])
    return item
###############################################################################
#
# populate_FW_item
#
# set FWItem members as specified in the xml file. Don't run any cmd here.
#
###############################################################################
def populate_FW_item(element):
    """Build an FWItem from its spec-file XML element.

    Only populates fields; no shell command is executed here.  The element's
    children describe the version-query commands, the expected image, the
    upgrade command and the fallback image.
    """
    tmpFW = FWItem(element.attrib["name"])
    tmpFW.category = element.attrib["category"]
    for child in element:
        if child.tag == "cur_ver":
            # Attributes cmd0..cmd<N-1> list alternative version-query commands.
            nrOfCmd = child.attrib["possible_cmds"]
            for i in range (int(nrOfCmd)):
                cmdAttr = "cmd" + str(i)
                tmpFW.cur_ver_cmd_list.append(child.attrib[cmdAttr])
        if child.tag == "expected_image":
            tmpFW.expected_ver_str = child.attrib["ver"]
            tmpFW.expected_img = child.attrib["image_file"]
            if "LSI" not in tmpFW.category:
                tmpFW.newer_ok = child.attrib["newer_ok"]
        if child.tag == "upgrade_cmd":
            tmpFW.upgrade_cmd = child.attrib["cmd"]
            upgrade_cmd_opt = child.attrib["cmd_option"]
            # Ensure the options start with a space so upgrade_cmd +
            # upgrade_cmd_opt concatenates into a valid command line.
            # Bug fix: the original used `is not " "`, an identity test on a
            # string literal; use equality (!=) for the character comparison.
            if (len(upgrade_cmd_opt) >= 1 and upgrade_cmd_opt[0] != " "):
                tmpFW.upgrade_cmd_opt = " " + upgrade_cmd_opt
            else:
                tmpFW.upgrade_cmd_opt = upgrade_cmd_opt
        if child.tag == "fall_back":
            tmpFW.fallback_ver_str = child.attrib["ver"]
            tmpFW.fallback_img = child.attrib["image"]
            tmpFW.fallback_cmd_opt = child.attrib["cmd_option"]
    printd(DBG_VBS, "done with FWItem population")
    return tmpFW
###############################################################################
#
# populate_transaction
#
# set FWTransaction members as specified in the xml file. Don't run any cmd here.
#
###############################################################################
def populate_transaction(element):
    """Build an FWTransaction from its spec-file XML element.

    Copies the retry/alarm/fallback/reboot policy attributes and collects
    the names of the items and post-upgrade actions; nothing is executed
    here.
    """
    tmpTr = FWTransaction(element.attrib["name"])
    tmpTr.retry_times = element.attrib["retry_times"]
    tmpTr.trigger_alarm = element.attrib["alarm_if_fails"]
    tmpTr.if_fall_back = element.attrib["fall_back_to_old_image"]
    tmpTr.need_reboot = element.attrib["need_reboot"]
    if ( tmpTr.need_reboot == "yes" ):
        tmpTr.reboot_now = element.attrib["reboot_now"]
    # Renamed from "len", which shadowed the builtin len().
    item_count = 0
    for child in element:
        if child.tag == "item":
            tmpTr.add_action_name( child.attrib["name"] )
            item_count += 1
        if child.tag == "after_upgrade_action":
            tmpTr.add_post_action_name( child.attrib["name"] )
    printd(DBG_VBS, "tot item is %d" %item_count)
    return tmpTr
###############################################################################
#
# populate_cmds
#
# set FWTransaction members as specified in the xml file. Don't run any cmd here.
#
###############################################################################
def populate_cmds(element):
    """Build a CmdAction from its spec-file XML element.

    The element's name is registered in the action's own name list;
    nothing is executed here.
    """
    tmpCmdAct = CmdAction(element.attrib["name"])
    tmpCmdAct.add_action_name(element.attrib["name"])
    return tmpCmdAct
###############################################################################
#
# main function
#
# do actual checking and upgrading and falling back
#
###############################################################################
def main():
    """Drive the firmware check / upgrade / fallback flow from the xml spec.

    Parses command line options, reads the rfut state files under /boot,
    parses the fwspec xml, builds the action list for the current
    motherboard, and either reports current versions or runs the upgrade
    transactions, exiting with 0xff when the caller must reboot.
    """
    # Set up command line args
    parser = OptionParser(version="%prog Version 0.2")
    # command line parser options
    parser.add_option("-d", "--debug", action="store", type="int",
                      dest="debug", default="3",
                      help="Set debug level, 0 - 4, 0 is none, 4 is showing all msgs")
    parser.add_option("-l", "--logfile", action="store", type="string",
                      dest="logfile", default="",
                      help="Log stdout to this file (like Unix tee command)")
    parser.add_option("-s", "--sys-log", action="store_true",
                      dest="syslog_enable", default=False,
                      help="Log stdout to syslog. Disabled by default")
    parser.add_option("-c", "--configfile", action="store", type="string",
                      dest="configfile", default="/opt/rbt/etc/fwspec.xml",
                      help="specify the config file to use, default is /opt/rbt/etc/fwspec.xml")
    parser.add_option("-g", "--get-current-ver", action="store_true",
                      dest="only_get_cur", default=False,
                      help="only show current fimware versions, do not upgrade anything")
    parser.add_option("-r", "--remove_state_file", action="store_true",
                      dest="remove_state_file", default=False,
                      help="delete all the state files related to rfut. This will let the rfut to do upgrade again.")
    (options, args) = parser.parse_args()
    global SYSLOG_ENABLE
    SYSLOG_ENABLE = options.syslog_enable
    # log stdout like tee
    if (options.logfile != ""):
        tee = TeeLog(sys.stdout, options.logfile)
        sys.stdout = tee
    elif (options.syslog_enable):
        tee = TeeLog(sys.stdout, "")
        sys.stdout = tee
    # set global debug level
    global CURRENT_DBG_LEVEL
    CURRENT_DBG_LEVEL = options.debug
    global G_NEED_REBOOT
    G_NEED_REBOOT = False
    global G_alarm_file_path
    G_alarm_file_path = "/boot/.rfut_alarm"
    # check if .upgrade_complete file exist or not. exit if it exists
    done_file_path = "/boot/.rfut_upgrade_finished"
    state_file_path = "/boot/.rfut_cur_state"
    # -r: wipe all state/alarm files so the next run upgrades again.
    if (options.remove_state_file):
        try:
            os.remove(done_file_path)
        except OSError, err:
            printd(DBG_ERR, "Error: Failed to delete state file, error %s" %err )
        try:
            os.remove(state_file_path)
        except OSError, err:
            printd(DBG_ERR, "Error: Failed to delete state file, error %s" %err )
        try:
            os.remove(G_alarm_file_path)
        except OSError, err:
            printd(DBG_ERR, "Error: Failed to delete alarm file, error %s" %err )
        return
    if ( check_file( done_file_path, False ) and (not options.only_get_cur)):
        printd(DBG_INF, "Firmware Upgrade completed")
        sys.exit( 0 )
    # read current state from file
    # done_act_dict maps transaction name -> "done" for completed steps.
    done_act_dict = {}
    if ( check_file( state_file_path, False ) ):
        printd(DBG_VBS, "Firmware state file exist")
        f = open(state_file_path, 'r')
        for line in f:
            (date, time, key, val) = line.split()
            done_act_dict[key] = val
        f.close()
    # read in xml configuration file
    xml_file = os.path.abspath(__file__)
    xml_file = os.path.dirname(xml_file)
    xml_file = os.path.join(xml_file, options.configfile)
    # get current mother board type
    (part, type) = get_motherboard()
    printd(DBG_VBS, "motherboard is" + part + " " + type)
    # parse the xml file
    try:
        tree = etree.parse(xml_file)
    except Exception, inst:
        printd(DBG_ERR, "Error: Unexpected error opening %s: %s" % (xml_file, inst))
        raise
    root = tree.getroot()
    # full_act_dict: every defined action by name; real_act_list: the
    # ordered actions selected for this motherboard.
    full_act_dict = {}
    real_act_list = []
    for element in root.iter("*"):
        if element.tag:
            printd(DBG_VBS, "tag ---" + element.tag)
        if (element.tag == "binaries"):
            printd(DBG_VBS, "found binaries")
            for child in element:
                if child.tag == "cmd":
                    printd(DBG_VBS, "found cmds " + child.attrib["name"])
                    tmpBin = Binary(child.attrib["name"])
                    if tmpBin.whereis() is None:
                        raise Exception ('Unable to find executable ' + child.attrib["name"])
        if (element.tag == "cmds"):
            printd(DBG_VBS, "found cmds nodes")
            tmpCmd = populate_cmd_item(element)
            printd(DBG_VBS, "cmds item" + tmpCmd.to_string())
            full_act_dict[tmpCmd.name] = tmpCmd
        if (element.tag == "FW_item"):
            printd(DBG_VBS, "found FW_item")
            tmpFW = populate_FW_item(element)
            printd(DBG_VBS, "fw item" + tmpFW.to_string())
            full_act_dict[tmpFW.name] = tmpFW
        if (element.tag == "motherboard"
                and element.attrib["part"] == part
                and element.attrib['mobo_type'] == type):
            for child in element:
                if child.tag == "upgrade":
                    printd(DBG_VBS, "upgrade len is " + str(len(child)))
                    for items in child:
                        if (items.tag == "transaction"):
                            printd(DBG_VBS, "found transaction")
                            tmpTr = populate_transaction(items)
                            printd(DBG_VBS, "transaction is " + tmpTr.to_string())
                            real_act_list.append( tmpTr )
                        if (items.tag == "run_cmd"):
                            printd(DBG_VBS, "found run_cmds")
                            tmpCmdAct = populate_cmds(items)
                            printd(DBG_VBS, "cmds is " + tmpCmdAct.to_string())
                            real_act_list.append( tmpCmdAct )
    for tr in real_act_list:
        printd(DBG_VBS, "action list is " + tr.to_string())
    # -g: only report current firmware versions, do not upgrade.
    if ( options.only_get_cur ):
        for tr in real_act_list:
            for act in tr.action_name_list:
                tr.add_action(full_act_dict[act])
            try:
                tr.get_cur_fw_ver()
            except Exception, err:
                printd(DBG_ERR, "Failed to get current firmware version, err %s" %err)
        return
    # perform upgrade
    for tr in real_act_list:
        if ( tr.name in done_act_dict):
            printd(DBG_VBS, "%s is already done, skip it" %tr.name)
            continue
        for act in tr.action_name_list:
            tr.add_action(full_act_dict[act])
        for act in tr.post_action_name_list:
            tr.add_post_action(full_act_dict[act])
        try:
            tr.do_it()
        except RebootException:
            # Record this transaction as done before rebooting so it is
            # skipped on the next run.
            now = datetime.datetime.now()
            try:
                f = open(state_file_path, 'a')
            except Exception, err:
                printd(DBG_ERR, "Failed to open file %s, err %s" %(state_file_path, err))
            else:
                try:
                    f.write( str(now) )
                    f.write(" ")
                    f.write(tr.name)
                    f.write(" done\n")
                except Exception, err:
                    printd(DBG_ERR, "Failed to write to file %s, err %s" %(state_file_path, err))
                finally:
                    f.close()
            printd(DBG_INF, "rebooting....")
            sys.exit( 0xff ) #the caller should check the exit value and reboot.
        #write to state file
        now = datetime.datetime.now()
        try:
            f = open(state_file_path, 'a')
        except Exception, err:
            printd(DBG_ERR, "Failed to open file %s, err %s" %(state_file_path, err))
        else:
            try:
                f.write( str(now) )
                f.write(" ")
                f.write(tr.name)
                f.write(" done\n")
            except Exception, err:
                printd(DBG_ERR, "Failed to write to file %s, err %s" %(state_file_path, err))
            finally:
                f.close()
    # create hidden complete file so that we do not repeat ourselves
    try:
        finish_file = open(done_file_path, 'w')
    except IOError:
        printd(DBG_ERR, "Error: Failed to create log file" )
        sys.exit ( 1 )
    finish_file.close()
    if (G_NEED_REBOOT):
        printd(DBG_INF, "rebooting....")
        sys.exit( 0xff ) #the caller should check the exit value and reboot.
# Script entry point.
if __name__ == "__main__":
    main()
| [
"akkmzack@sharklasers.com"
] | akkmzack@sharklasers.com |
83a1d8573fd98ab9baa0b2388b9256d3d27f2daf | 483d26722245774d0860f45157b0bc578dd2ff15 | /crudproject/crud/views.py | dee5b041dd944c71b1cab6032b56809eee6b90fe | [] | no_license | raianibnfaiz/crudproject | a44a0a9a49e83442bb0019c2141b6304ff10fb2d | 0a7ab9efb57aa57784b169ffb2eea8792daa1b86 | refs/heads/master | 2022-11-28T04:19:27.177597 | 2020-08-05T07:39:36 | 2020-08-05T07:39:36 | 285,215,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | from django.shortcuts import render,redirect
from .forms import MyUserForms
from .models import MyUser
def create_user(request):
    """List users and create a new MyUser when the posted form validates."""
    users = MyUser.objects.all()
    form = MyUserForms(request.POST or None)
    if form.is_valid():
        form.save()
        return redirect('/')
    context = {'form': form, 'users': users}
    return render(request, 'index.html', context)
def update_user(request, id):
    """Edit the MyUser identified by *id*, saving posted changes.

    Renders the same list/form template as create_user when the form
    is not valid (or on a plain GET).
    """
    # Bind the form to the existing instance. The original rebound the
    # name 'id' to the model instance, shadowing both the argument and
    # the builtin 'id'.
    user = MyUser.objects.get(id=id)
    form = MyUserForms(request.POST or None, instance=user)
    users = MyUser.objects.all()
    if form.is_valid():
        form.save()
        return redirect('/')
    return render(request, 'index.html', {'form': form, 'users': users})
def delete_user(request, id):
    """Delete the MyUser with the given primary key and return to the list."""
    MyUser.objects.get(id=id).delete()
    return redirect('/')
| [
"raianibnfaiz@gmail.com"
] | raianibnfaiz@gmail.com |
85140adbd56c46390eb3746b437a36b0224da0a8 | f632e642d2727fbe33f7a52ad32b03a3d13d4add | /校庆论文/校庆论文/xiaoqing/ClusterMain.py | 952d5420bdf3206343b287acabbd48ecabefd247 | [] | no_license | east1203/CPlusPlus | 7e0896fce4d36851ab26014ab50de1e3ab0e2f09 | be61a73e54647fd66fcce2f3d93c0703bdb7a6d7 | refs/heads/master | 2020-04-28T04:46:05.868932 | 2019-05-16T06:45:59 | 2019-05-16T06:45:59 | 174,964,387 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,211 | py | # coding=utf-8
# Iterative register clustering driver: reads register locations and
# initial cluster centers from disk, repeatedly re-clusters until the
# total wire length stops improving, then writes the result files.
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
from math import *
import PseudoCenter
import ClusterFunc
##def EL(centers,clusters:
print("************** start ***************")
#size = 162
size = 214           # number of registers
clustersnum = 9      # number of clusters
array_regsloca = np.zeros((size,2))
ind = 0
# Load register (x, y) locations, one per line.
with open("./regsloca.data","r") as f:
    for line in f.readlines():
        sline = line.strip("\n")
        lline = line.split(" ")
        array_regsloca[ind][0] = lline[0]
        array_regsloca[ind][1] = lline[1]
        ind = ind+1
ind = 0
centers = np.zeros((clustersnum,2))
# Load the initial cluster centers.
with open("./init_centers.data","r") as f:
    for line in f.readlines():
        sline = line.strip("\n")
        lline = line.split(" ")
        centers[ind][0] = lline[0]
        centers[ind][1] = lline[1]
        ind = ind+1
stop = False
dis0 = 0
distmp = float('inf')
num = 0
WL = 0  ## total wire length
WLpre = float('inf')
TMP = []
TMPA = []
count = 0
continueFlag = True
## call the register clustering function repeatedly
while continueFlag:
    clusterstmp,centerstmp,WL = ClusterFunc.ClusterFunc(centers,clustersnum,array_regsloca,size)
    TMP.append(WL)
    if WL < WLpre:
        WLpre = WL
        centers = centerstmp
        clusters = clusterstmp
        TMPA.append(WL)
        count = 0
        ## write the improved cluster centers back to disk
        with open("init_centers.data","w") as f:
            for i in range(clustersnum):
                f.write(str(centers[i][0])+" "+str(centers[i][1])+"\n")
    else:
        ## stop clustering after many consecutive rounds without improvement
        count+=1
        if count>15:
            continueFlag = False
print("wl长度是 ")
print(TMP)
print(TMPA)
# for i in range(1):
#     clusterstmp,centerstmp,WL = ClusterFunc.ClusterFunc(centers,clustersnum,array_regsloca,size)
#     centers = centerstmp
#     TMP.append(WL)
#     with open("init_centers.data","w") as f:
#         for i in range(clustersnum):
#             f.write(str(centers[i][0])+" "+str(centers[i][1])+"\n")
#     print("wl长度是 ")
#     print(TMP)
## write the clustering result to result.data
with open("result.data","w") as f:
    for i in range(clustersnum):
        if len(clusters[i])>1:
            if clusters[i]:
                #result=result.append(str(clusters[i])+'\n')
                #print("1111")
                l = str(clusters[i]).strip("[")
                ll = l.strip("]")
                lll = ll.split(",")
                for j in range(len(lll)):
                    f.write(lll[j]+" ")
                f.write('\n')
print(clustersnum)
# print("WL is "+str(WL))
# print("WLpre is " + str(WLpre))
print(centers)
print("每个群组中寄存器的数目")
for i in range(clustersnum):
    print(len(clusters[i]))
## print the location of every register in each cluster
#for i in range(clustersnum):
#    print("群组"+str(i)+"的寄存器位置如下:")
#    for j in range(len(clusters[i])):
#        print(str(array_regsloca[clusters[i][j]][0])+" "+str(array_regsloca[clusters[i][j]][1]))
'''
for i in range(clustersnum):
    print("群组"+str(i)+"的寄存器位置如下:")
    print("群组"+str(i)+"的x坐标: ")
    for j in range(len(clusters[i])):
        print(str(array_regsloca[clusters[i][j]][0]))
    print("群组"+str(i)+"的y坐标: ")
    for j in range(len(clusters[i])):
        print(str(array_regsloca[clusters[i][j]][1]))
'''
print("************** end ***************")
| [
"dyk1203@126.com"
] | dyk1203@126.com |
3a487eaa5b78fc12e70fe3c362a2bbce9ff5fb7c | fd7498ec78932500333d1c922bdbdebac94fbd7e | /leads/api.py | caeb33d61ba3b09108f7dee82ab41b9f0bdc6ace | [] | no_license | ajay-staruss/DjangoReactLEadManager | 303419f2108379eb8ac78ad1bb7c8e171e2e1ded | 83dadbb2d60657662f46a61aa8cf7fae2c0ee567 | refs/heads/master | 2022-07-02T08:25:32.669245 | 2020-05-16T09:39:04 | 2020-05-16T09:39:04 | 264,403,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | from leads.models import Lead
from rest_framework import viewsets,permissions
from .serializers import LeadSerializer
class LeadViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for Lead objects."""
    queryset = Lead.objects.all()
    # AllowAny: no authentication is required for this endpoint.
    permission_classes = [
        permissions.AllowAny
    ]
    serializer_class = LeadSerializer
"ajaypratap9980@gmail.com"
] | ajaypratap9980@gmail.com |
e49f952e2c76871ed20d6b04f878fb0efc340461 | 9d8188246250c2506bf69ebb44ee5342891d3663 | /model/layers/encoder.py | 9d1d5eee9ac2c25e543abcc6b66ec7b2edb7c2e4 | [] | no_license | wujunjie1998/kg-topic-chat | 22433e35758972a519056b61de117820e26c3de9 | d3d393cdc49d9a82727d4b0aee71692e90fed17f | refs/heads/master | 2020-12-02T01:42:35.543629 | 2019-12-30T04:25:22 | 2019-12-30T04:25:22 | 230,845,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,628 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence, PackedSequence
from utils import to_var, reverse_order_valid, PAD_ID
from .rnncells import StackedGRUCell, StackedLSTMCell
import pdb
import copy
class BaseRNNEncoder(nn.Module):
    """Shared helpers (initial state, batch-size inference) for RNN encoders."""

    def __init__(self):
        super(BaseRNNEncoder, self).__init__()

    @property
    def use_lstm(self):
        """True when the underlying ``self.rnn`` module is an LSTM."""
        if not hasattr(self, 'rnn'):
            raise AttributeError('no rnn selected')
        return isinstance(self.rnn, nn.LSTM)

    def init_h(self, batch_size=None, hidden=None):
        """Return *hidden* if given, otherwise a zero initial RNN state."""
        if hidden is not None:
            return hidden
        shape = (self.num_layers * self.num_directions,
                 batch_size,
                 self.hidden_size)
        if self.use_lstm:
            # LSTMs carry a (hidden, cell) pair of states.
            return (to_var(torch.zeros(*shape)),
                    to_var(torch.zeros(*shape)))
        return to_var(torch.zeros(*shape))

    def batch_size(self, inputs=None, h=None):
        """Infer the batch size from *inputs* or from a hidden state *h*.

        inputs: [batch_size, seq_len]
        h: [num_layers, batch_size, hidden_size] (RNN/GRU)
           or a (h, c) tuple for LSTM.
        """
        if inputs is not None:
            return inputs.size(0)
        state = h[0] if self.use_lstm else h
        return state.size(1)

    def forward(self):
        raise NotImplementedError
class EncoderRNN(BaseRNNEncoder):
    """Sentence-level RNN encoder over word embeddings."""

    def __init__(self, vocab_size, embedding_size,
                 hidden_size, rnn=nn.GRU, num_layers=1, bidirectional=False,
                 dropout=0.0, bias=True, batch_first=True):
        """Sentence-level Encoder"""
        super(EncoderRNN, self).__init__()
        self.vocab_size = vocab_size
        self.embedding_size = embedding_size
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.dropout = dropout
        self.batch_first = batch_first
        self.bidirectional = bidirectional
        if bidirectional:
            self.num_directions = 2
        else:
            self.num_directions = 1
        # word embedding (PAD rows stay zero via padding_idx)
        self.embedding = nn.Embedding(vocab_size, embedding_size, padding_idx=PAD_ID)
        self.rnn = rnn(input_size=embedding_size,
                       hidden_size=hidden_size,
                       num_layers=num_layers,
                       bias=bias,
                       batch_first=batch_first,
                       dropout=dropout,
                       bidirectional=bidirectional)

    def forward(self, inputs, input_length, hidden=None):
        """
        Args:
            inputs (Variable, LongTensor): [num_setences, max_seq_len]
            input_length (Variable, LongTensor): [num_sentences]
        Return:
            outputs (Variable): [max_source_length, batch_size, hidden_size]
                - list of all hidden states
            hidden ((tuple of) Variable): [num_layers*num_directions, batch_size, hidden_size]
                - last hidden state
                - (h, c) or h
        """
        batch_size, seq_len = inputs.size()
        # Sort in decreasing order of length for pack_padded_sequence()
        input_length_sorted, indices = input_length.sort(descending=True)
        input_length_sorted = input_length_sorted.data.tolist()
        # [num_sentences, max_source_length]
        inputs_sorted = inputs.index_select(0, indices)
        # [num_sentences, max_source_length, embedding_dim]
        embedded = self.embedding(inputs_sorted)
        # batch_first=True
        rnn_input = pack_padded_sequence(embedded, input_length_sorted,
                                         batch_first=self.batch_first)
        hidden = self.init_h(batch_size, hidden=hidden)
        # outputs: [batch, seq_len, hidden_size * num_directions]
        # hidden: [num_layers * num_directions, batch, hidden_size]
        self.rnn.flatten_parameters()
        outputs, hidden = self.rnn(rnn_input, hidden)
        outputs, outputs_lengths = pad_packed_sequence(outputs, batch_first=self.batch_first)
        # Reorder outputs and hidden back to the caller's original order.
        _, inverse_indices = indices.sort()
        outputs = outputs.index_select(0, inverse_indices)
        if self.use_lstm:
            hidden = (hidden[0].index_select(1, inverse_indices),
                      hidden[1].index_select(1, inverse_indices))
        else:
            hidden = hidden.index_select(1, inverse_indices)
        return outputs, hidden

    def step(self, inputs, hidden=None):
        # Run a single unpacked step over *inputs*.
        batch_size = inputs.size(0)
        embedded = self.embedding(inputs)
        # encoder_hidden: [1, batch_size, hidden_size]
        hidden = self.init_h(batch_size, hidden=hidden)
        self.rnn.flatten_parameters()
        outputs, hidden = self.rnn(embedded, hidden)
        return outputs, hidden
class ContextRNN(BaseRNNEncoder):
    """Context-level (conversation-level) RNN encoder over utterance encodings."""

    def __init__(self, input_size, context_size, rnn=nn.GRU, num_layers=1, dropout=0.0,
                 bidirectional=False, bias=True, batch_first=True):
        """Context-level Encoder"""
        super(ContextRNN, self).__init__()
        self.input_size = input_size
        self.context_size = context_size
        self.hidden_size = self.context_size
        self.num_layers = num_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.batch_first = batch_first
        if bidirectional:
            self.num_directions = 2
        else:
            self.num_directions = 1
        self.rnn = rnn(input_size=input_size,
                       hidden_size=context_size,
                       num_layers=num_layers,
                       bias=bias,
                       batch_first=batch_first,
                       dropout=dropout,
                       bidirectional=bidirectional)

    def forward(self, encoder_hidden, conversation_length, hidden=None):
        """
        Args:
            encoder_hidden (Variable, FloatTensor): [batch_size, max_len, num_layers * direction * hidden_size]
            conversation_length (Variable, LongTensor): [batch_size]
        Return:
            outputs (Variable): [batch_size, max_seq_len, hidden_size]
                - list of all hidden states
            hidden ((tuple of) Variable): [num_layers*num_directions, batch_size, hidden_size]
                - last hidden state
                - (h, c) or h
        """
        batch_size, seq_len, _ = encoder_hidden.size()
        # Sort for PackedSequence
        conv_length_sorted, indices = conversation_length.sort(descending=True)
        conv_length_sorted = conv_length_sorted.data.tolist()
        encoder_hidden_sorted = encoder_hidden.index_select(0, indices)
        rnn_input = pack_padded_sequence(encoder_hidden_sorted, conv_length_sorted, batch_first=True)
        hidden = self.init_h(batch_size, hidden=hidden)
        self.rnn.flatten_parameters()
        outputs, hidden = self.rnn(rnn_input, hidden)
        # outputs: [batch_size, max_conversation_length, context_size]
        outputs, outputs_length = pad_packed_sequence(outputs, batch_first=True)
        # reorder outputs and hidden back to the caller's original order
        _, inverse_indices = indices.sort()
        outputs = outputs.index_select(0, inverse_indices)
        if self.use_lstm:
            hidden = (hidden[0].index_select(1, inverse_indices),
                      hidden[1].index_select(1, inverse_indices))
        else:
            hidden = hidden.index_select(1, inverse_indices)
        # outputs: [batch, seq_len, hidden_size * num_directions]
        # hidden: [num_layers * num_directions, batch, hidden_size]
        return outputs, hidden

    def step(self, encoder_hidden, hidden):
        """Run a single context step on one utterance encoding."""
        batch_size = encoder_hidden.size(0)
        # encoder_hidden: [1, batch_size, hidden_size]
        # FIX: removed a leftover pdb.set_trace() debugging breakpoint that
        # halted every call to step().
        encoder_hidden = torch.unsqueeze(encoder_hidden, 1)
        if hidden is None:
            hidden = self.init_h(batch_size, hidden=None)
        self.rnn.flatten_parameters()
        outputs, hidden = self.rnn(encoder_hidden, hidden)
        return outputs, hidden
| [
"1052231507@qq.com"
] | 1052231507@qq.com |
fa7e68d4fc0084365ff3f46b598bd7a7a7599c45 | 5729cdb7af50aa8afedd459e53f7e7f8e6b77b34 | /src/settings.py | a860f4ef04c2c8af9d1dcf9088ffa2124575c0dc | [] | no_license | oranb83/sendgrid-cli | 75b6b9b761d7657846869f73e4c162a2310848b3 | a56fba12557480befdbc28d5f5de24ca8cc6b5b2 | refs/heads/master | 2023-06-14T20:37:27.331325 | 2021-07-13T08:49:54 | 2021-07-13T08:49:54 | 385,169,467 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,375 | py | import os
# Production API key is taken from the environment (None when unset).
SENDGRID_API_KEY_PRODUCTION = os.getenv('SENDGRID_API_KEY_PRODUCTION')

# Permission scopes granted to generated SendGrid API keys (mostly
# read-only access plus template/category management).
BASE_SCOPES = [
    'access_settings.activity.read',
    'access_settings.whitelist.read',
    'alerts.read',
    'asm.groups.read',
    'browsers.stats.read',
    'categories.create',
    'categories.delete',
    'categories.read',
    'categories.stats.read',
    'categories.stats.sums.read',
    'categories.update',
    'clients.desktop.stats.read',
    'clients.phone.stats.read',
    'clients.stats.read',
    'clients.tablet.stats.read',
    'clients.webmail.stats.read',
    'devices.stats.read',
    'email_testing.read',
    'email_testing.write',
    'geo.stats.read',
    'ips.pools.ips.read',
    'ips.pools.read',
    'ips.read',
    'ips.warmup.read',
    'mail.batch.read',
    'mail_settings.address_whitelist.read',
    'mail_settings.address_whitelist.update',
    'mail_settings.bcc.read',
    'mail_settings.bcc.update',
    'mail_settings.bounce_purge.read',
    'mail_settings.bounce_purge.update',
    'mail_settings.footer.read',
    'mail_settings.footer.update',
    'mail_settings.forward_bounce.read',
    'mail_settings.forward_bounce.update',
    'mail_settings.forward_spam.read',
    'mail_settings.forward_spam.update',
    'mail_settings.plain_content.read',
    'mail_settings.plain_content.update',
    'mail_settings.spam_check.read',
    'mail_settings.spam_check.update',
    'mail_settings.template.read',
    'mail_settings.template.update',
    'mailbox_providers.stats.read',
    'stats.global.read',
    'stats.read',
    'suppression.read',
    'templates.create',
    'templates.delete',
    'templates.read',
    'templates.update',
    'templates.versions.activate.create',
    'templates.versions.create',
    'templates.versions.delete',
    'templates.versions.read',
    'templates.versions.update',
    'tracking_settings.click.read',
    'tracking_settings.google_analytics.read',
    'tracking_settings.open.read',
    'tracking_settings.subscription.read',
    'user.scheduled_sends.read',
    'user.settings.enforced_tls.read',
    'user.timezone.read',
    'user.webhooks.event.settings.read',
    'user.webhooks.event.settings.update',
    'user.webhooks.event.test.create',
    'user.webhooks.event.test.read',
    'user.webhooks.event.test.update',
    'user.webhooks.parse.settings.read',
    'user.webhooks.parse.stats.read'
]
| [
"oran.ben-zur@lemonade.com"
] | oran.ben-zur@lemonade.com |
07cf4d48d0b55b9e6fe1bd0d4344f1bd79798a8e | 450d61dd7041acbd9e143bfcf2032f316e8c636d | /src/models/__init__.py | b6d446905eafe53865c5fb64d5781325902d1642 | [] | no_license | zyunnn/fyt_spatiotemporal | ef7454b88d9672607955119ba3f644912058e5e9 | b2bcc90bc776307e068081fff437a399b62cfb76 | refs/heads/main | 2023-05-21T01:39:53.065828 | 2021-06-01T14:10:09 | 2021-06-01T14:10:09 | 322,542,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38 | py | # from models.stgcn import build_model | [
"zhiyun@gmail.com"
] | zhiyun@gmail.com |
c0a063610c0a0621da7a69ee2f01d04b83117f62 | 7ce2dd7b4b0671c8b051855e4bf1f6e85e8f7567 | /2018202115/HumanEye-2018201115-final_submit/codes/image_captioning-master/utils/coco/pycocoevalcap/bleu/bleu.py | d5fa008f4edcdae02b4a687a71743b2a555443b8 | [
"MIT",
"BSD-2-Clause-Views"
] | permissive | weimingtom/ai20projects | b63286df9c5d72cb6ed2c188a2d58e80f82e4ca9 | ca6f993cfa569250b3116921f4b481d01de36197 | refs/heads/master | 2023-02-08T04:59:19.719235 | 2020-12-30T00:22:36 | 2020-12-30T00:22:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,289 | py | #!/usr/bin/env python
#
# File Name : bleu.py
#
# Description : Wrapper for BLEU scorer.
#
# Creation Date : 06-01-2015
# Last Modified : Thu 19 Mar 2015 09:13:28 PM PDT
# Authors : Hao Fang <hfang@uw.edu> and Tsung-Yi Lin <tl483@cornell.edu>
from .bleu_scorer import BleuScorer
#from pycocoevalcap.bleu import BleuScore
class Bleu:
    """Wrapper around BleuScorer that computes BLEU-1..n for a result set."""

    def __init__(self, n=4):
        # default compute Blue score up to 4
        self._n = n
        self._hypo_for_image = {}
        self.ref_for_image = {}

    def compute_score(self, gts, res):
        """Score hypotheses *res* against references *gts* keyed by image id."""
        assert(gts.keys() == res.keys())
        scorer = BleuScorer(n=self._n)
        for image_id in gts.keys():
            hypothesis = res[image_id]
            references = gts[image_id]
            # Sanity check: exactly one hypothesis, at least one reference.
            assert(type(hypothesis) is list)
            assert(len(hypothesis) == 1)
            assert(type(references) is list)
            assert(len(references) >= 1)
            scorer += (hypothesis[0], references)
        #score, scores = scorer.compute_score(option='shortest')
        score, scores = scorer.compute_score(option='closest', verbose=1)
        #score, scores = scorer.compute_score(option='average', verbose=1)
        # return (bleu, bleu_info)
        return score, scores

    def method(self):
        return "Bleu"
| [
"1608231147@qq.com"
] | 1608231147@qq.com |
b46a3f8bb2a7aa7189a03e9bb03385aa2adc1203 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_135/3207.py | 49316f0601d1e454902936007d3f7d43574994a8 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | with open("a.in", 'r') as f:
T = int(f.readline())
for t in range(1, T+1):
S = set(range(1, 17))
for i in range(2):
n = int(f.readline())
for j in range(1, 5):
line = f.readline()
if n == j:
S = S & set(map(int, line.split()))
if len(S) == 0:
print("Case #%d: Volunteer cheated!" % t)
elif len(S) > 1:
print("Case #%d: Bad magician!" % t)
else:
print("Case #%d: %d" % (t, list(S)[0]))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
9912abe02a2963442cfacb21a5e7c9030823ce04 | 72bb6deb4a8657fc494777847e356c328fe8550d | /PythonS/script1.py | 1d3d64cde0bd0f88080ed26202c14e8d80e65aec | [] | no_license | rising-turtle/study | daafeac486e274af47268e9c7d66feacd3476a44 | 76b414d0a47692f997539b34b356c34a3528f34d | refs/heads/master | 2021-12-09T07:23:41.800642 | 2021-12-02T19:34:44 | 2021-12-02T19:34:44 | 90,572,871 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | #!/usr/bin/python
def filerw(f):
    """Write two demo lines to file *f*, then read back and print its first line."""
    # 'with' guarantees both handles are closed; the original leaked the
    # read handle and used swapped/misleading names (fr for the writer,
    # fw for the reader).
    with open(f, "w") as writer:
        writer.write("I love Python! \n Hello, World!\n")
    with open(f, "r") as reader:
        text = reader.readline()
    print(text)
# Run the demo against a scratch file when executed as a script.
if __name__=='__main__':
    filerw("1.txt")
| [
"hxzhang1@ualr.edu"
] | hxzhang1@ualr.edu |
d9a6b276047aedbc1ab62415fd3b8fcf764596d0 | bf9aab3694c7cb0b8fbc588f79da29f3993d77b5 | /search/djangohaystack/migrations/0001_initial.py | 2511f634f69cdaba051f6ed4114da4c0a1594463 | [] | no_license | clarle/council-catalog | c765bf641c114e426bd170f6ed56a2e2a3df9c79 | 123348d60233191d8938ec8341bbc5c0b4b3ad7b | refs/heads/master | 2021-01-21T08:20:53.389689 | 2017-08-11T02:32:10 | 2017-08-11T02:32:10 | 101,958,092 | 0 | 0 | null | 2017-08-31T04:09:10 | 2017-08-31T04:09:10 | null | UTF-8 | Python | false | false | 902 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-06-27 21:47
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Note table."""

    initial = True

    dependencies = [
        # Depends on the (possibly swapped) user model being available.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Note',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pub_date', models.DateTimeField()),
                ('title', models.CharField(max_length=200)),
                ('body', models.TextField()),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"hoangle2806@gmail.com"
] | hoangle2806@gmail.com |
33deb59137072fe9d5e752110a6fae15296410e0 | af496ed4911fe3c1ba36725f1131200436235d8e | /sentry_telegram/__init__.py | 59cd5d9848182e81ddf3e07a613b22f487d35719 | [
"MIT"
] | permissive | AlwxSin/sentry-telegram | 3058fc28023b82ab6554aa55dad1c38fae8f12d0 | 7ed32234ccc6205381b8c38e6e590f6fc14a7250 | refs/heads/master | 2020-03-11T03:22:45.175270 | 2018-04-16T14:23:27 | 2018-04-16T14:23:27 | 129,745,341 | 0 | 1 | MIT | 2018-04-16T13:09:37 | 2018-04-16T13:09:36 | null | UTF-8 | Python | false | false | 421 | py | # coding: utf-8
"""
Plugin for Sentry which allows sending notification via Telegram messenger.
"""
from django.conf import settings

__version__ = '0.2.0'

# Register the plugin with Sentry, but only when Django settings are
# configured and no plugin with the same slug is registered yet.
if settings.configured:
    from sentry.plugins import plugins, register
    from plugin import TelegramNotificationsPlugin
    if TelegramNotificationsPlugin.slug not in [plugin.slug for plugin in plugins.all()]:
        register(TelegramNotificationsPlugin)
| [
"butorovv@gmail.com"
] | butorovv@gmail.com |
c46792451b28fc636b23223660a4fbab590ff19d | f420dd6405d205f19c06e82e5979a7dd48092867 | /importer.py | 45c91b0a2e72d1a43cf5d24213cbc31ad63798d9 | [] | no_license | 23tymas/cs50final | c276b9207850b346c0de749f2dec75e52c90282c | e576c29e8961283cdd21ff322e21eae32a0c379d | refs/heads/main | 2023-02-09T12:01:28.434502 | 2020-12-30T05:52:03 | 2020-12-30T05:52:03 | 325,461,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | import sqlite3
import csv

# Open (creating if needed) the SQLite database holding the 'dates' table.
db = sqlite3.connect('dates.sqlite')
cur = db.cursor()
# Copy every (date, event) row from the CSV into the table, using
# parameterized SQL.
with open("dates.csv", "r") as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        rowdate = row['date']
        #print(rowdate)
        rowevent = row['event']
        cur.execute('INSERT INTO dates (date, event) VALUES(?,?)', (rowdate, rowevent))
# Commit once after the whole file is loaded, then close.
db.commit()
db.close()
| [
"noreply@github.com"
] | noreply@github.com |
4511b0ad24780352eda0aa3ad4c0b72d9782194f | 37a802a0b0b407225210cc8fe16fd6335d01c23d | /biopy.py | 86f69479a5071e5c7fe13f0143820c0af16e4740 | [] | no_license | Aakritisingla1895/Recsys | 9befba6acf43eb4e0a377349a6fd512f49f01a90 | b51945bdf443169b971f5b27daf0b3d57c99c31a | refs/heads/master | 2022-12-11T11:31:03.931031 | 2020-04-13T07:06:23 | 2020-04-13T07:06:23 | 212,941,788 | 0 | 0 | null | 2022-12-08T06:14:03 | 2019-10-05T04:06:13 | Jupyter Notebook | UTF-8 | Python | false | false | 1,018 | py | from Bio import Entrez
def search(query):
    """Run a relevance-sorted PubMed search for *query* (max 20 ids)."""
    Entrez.email = 'your.email@example.com'
    response = Entrez.esearch(db='pubmed',
                              sort='relevance',
                              retmax='20',
                              retmode='xml',
                              term=query)
    return Entrez.read(response)
def fetch_details(id_list):
    """Fetch the full PubMed records for the given list of article ids."""
    Entrez.email = 'your.email@example.com'
    joined_ids = ','.join(id_list)
    response = Entrez.efetch(db='pubmed',
                             retmode='xml',
                             id=joined_ids)
    return Entrez.read(response)
if __name__ == '__main__':
    # Demo: search PubMed for 'fever' and list the matching titles.
    results = search('fever')
    id_list = results['IdList']
    papers = fetch_details(id_list)
    for i, paper in enumerate(papers['PubmedArticle']):
        print("%d) %s" % (i+1, paper['MedlineCitation']['Article']['ArticleTitle']))
    # Pretty print the first paper in full
    #import json
    #print(json.dumps(papers[0], indent=2, separators=(',', ':')))
"aakritisingla18@gmail.com"
] | aakritisingla18@gmail.com |
9c69e890954b39c53456d3274149e26adb8cba6e | 2cf4c28f533065153b23c3b4084bf905467f4e23 | /utils/tensor_viewer/plugins/sandwich.py | 4395a597082b8f236ed00847d43cfbb277d7c9a2 | [] | no_license | WilliamRo/tframe | 94e75b4d7fd482ab5edeff2db966f4316390e32b | 2ac00b2a05fd65529adb7edf7123b3eea6e5e6f2 | refs/heads/master | 2023-09-01T22:02:02.372416 | 2023-08-24T08:10:26 | 2023-08-24T08:10:26 | 92,593,033 | 17 | 7 | null | 2022-07-23T01:35:10 | 2017-05-27T10:55:48 | Python | UTF-8 | Python | false | false | 1,082 | py | import re
import numpy as np
import matplotlib
from matplotlib.ticker import FuncFormatter
from tframe.utils.tensor_viewer.plugin import Plugin, VariableWithView
def _recursive_modify(v_dict, level=0):
if len(v_dict) == 0: return
assert isinstance(v_dict, dict)
if isinstance(list(v_dict.values())[0], dict):
for e_key, e_dict in v_dict.items():
print('>> Modifying dict {} ...'.format(e_key))
_recursive_modify(e_dict, level=level + 1)
return
# Here the values in v_dict must be lists
for key in list(v_dict.keys()):
if not re.fullmatch(r'dL/dS\[\d+\]', key): continue
triangle_list = v_dict[key]
new_list = []
for triangle in triangle_list:
assert isinstance(triangle, np.ndarray) and len(triangle.shape) == 2
bottom = np.sum(triangle, axis=0, keepdims=True)
new_list.append(np.concatenate(
[triangle, np.zeros_like(bottom), bottom], axis=0))
v_dict[key] = new_list
def modifier(v_dict):
    # Entry point handed to the viewer plugin machinery.
    print('>> Modifying by sandwich ...')
    _recursive_modify(v_dict)

# Plugin instance picked up by the tensor viewer.
plugin = Plugin(dict_modifier=modifier)
| [
"willi4m@zju.edu.cn"
] | willi4m@zju.edu.cn |
b98641d2d0a56568ac2c6e71b87e0798ba7dc7d4 | 8476b24d7e1d9a3592df5efd25a701c1a775a970 | /funcs.py | 6cb92bfe99b98c97af225c6278ae4ae8e2947c81 | [] | no_license | ahmedhosny/kerasLungs | 213ddf13d569d4d4e9db46461d7492201bd5f138 | a19434127034c7937539f7cdfd27f9d57e98f264 | refs/heads/master | 2020-02-26T15:12:24.824598 | 2017-05-22T14:35:45 | 2017-05-22T14:35:45 | 83,595,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40,718 | py |
from __future__ import division
from __future__ import print_function
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import krs
import pandas as pd
import os
from sklearn.metrics import roc_curve, auc
from keras.utils import np_utils
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten , advanced_activations
from keras.preprocessing.image import ImageDataGenerator
from keras.layers import Convolution2D, MaxPooling2D, Convolution3D , MaxPooling3D
from sklearn.metrics import roc_auc_score
import time
from keras import backend as K
import random
from tensorflow.python.ops import nn
from keras.layers.normalization import BatchNormalization
K.set_image_dim_ordering('tf')
#
#
# `7MM"""Yb. db MMP""MM""YMM db
# MM `Yb. ;MM: P' MM `7 ;MM:
# MM `Mb ,V^MM. MM ,V^MM.
# MM MM ,M `MM MM ,M `MM
# MM ,MP AbmmmqMA MM AbmmmqMA
# MM ,dP'A' VML MM A' VML
# .JMMmmmdP'.AMA. .AMMA..JMML..AMA. .AMMA.
#
#
def manageDataFrames(trainList=None, validateList=None, testList=None):
    """Load the master clinical sheet and split it into train/val/test cohorts.

    Rows are kept only when they have a patch volume (pathToData, pathToMask,
    stackMin present, patch_failed empty) plus the labels used downstream
    (surv2yr, histology_grouped, stage).  Histology is restricted to NSCLC
    codes 1-4 and stage to 1-3.

    The cohort dataset names were previously hard-coded; they can now be
    overridden while the defaults preserve the original behavior.

    Returns:
        (dataFrameTrain, dataFrameValidate, dataFrameTest) with fresh indices.
    """
    if trainList is None:
        trainList = ["moffitt"]  # other options seen: oncopanel, moffittSpore, oncomap, lung3
    if validateList is None:
        validateList = ["lung3"]
    if testList is None:
        testList = ["moffittSpore"]
    # pd.DataFrame.from_csv was deprecated and removed from pandas;
    # read_csv with index_col=0 and parse_dates=True matches its defaults.
    dataFrame = pd.read_csv('master_170228.csv', index_col=0, parse_dates=True)
    # keep only rows with complete imaging references and required labels
    dataFrame = dataFrame[
        (pd.notnull(dataFrame["pathToData"])) &
        (pd.notnull(dataFrame["pathToMask"])) &
        (pd.notnull(dataFrame["stackMin"])) &
        (pd.isnull(dataFrame["patch_failed"])) &
        (pd.notnull(dataFrame["surv2yr"])) &
        (pd.notnull(dataFrame["histology_grouped"])) &
        (pd.notnull(dataFrame["stage"]))
    ]
    dataFrame = dataFrame.reset_index(drop=True)
    # histology: NSCLC only (1-4); SCLC/other/no-data codes (0, 5-9) dropped
    histToInclude = [1.0, 2.0, 3.0, 4.0]
    dataFrame = dataFrame[dataFrame.histology_grouped.isin(histToInclude)]
    dataFrame = dataFrame.reset_index(drop=True)
    # stage: keep 1-3 only
    stageToInclude = [1.0, 2.0, 3.0]
    dataFrame = dataFrame[dataFrame.stage.isin(stageToInclude)]
    dataFrame = dataFrame.reset_index(drop=True)
    print ("all patients: " , dataFrame.shape)

    def _cohort(datasets):
        # rows whose `dataset` column is one of the requested names, reindexed
        sub = dataFrame[dataFrame["dataset"].isin(datasets)]
        return sub.reset_index(drop=True)

    dataFrameTrain = _cohort(trainList)
    print ("final - train size : " , dataFrameTrain.shape)
    dataFrameValidate = _cohort(validateList)
    print ("final - val size : " , dataFrameValidate.shape)
    dataFrameTest = _cohort(testList)
    print ("final - test size : " , dataFrameTest.shape)
    return dataFrameTrain, dataFrameValidate, dataFrameTest
# # run 50
# def manageDataFramesEqually():
# trainList = ["nsclc_rt"]
# validateList = ["lung1"]
# testList = ["lung2"]
# dataFrame = pd.DataFrame.from_csv('master_170228.csv', index_col = 0)
# dataFrame = dataFrame [
# ( pd.notnull( dataFrame["pathToData"] ) ) &
# ( pd.notnull( dataFrame["pathToMask"] ) ) &
# ( pd.notnull( dataFrame["stackMin"] ) ) &
# ( pd.isnull( dataFrame["patch_failed"] ) ) &
# # ( pd.notnull( dataFrame["surv1yr"] ) ) &
# ( pd.notnull( dataFrame["surv2yr"] ) ) &
# ( pd.notnull( dataFrame["histology_grouped"] ) ) # &
# # ( pd.notnull( dataFrame["stage"] ) )
# # ( pd.notnull( dataFrame["age"] ) )
# ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### FIX ALL
# #1# clean histology - remove smallcell and other
# # histToInclude - only NSCLC
# histToInclude = [1.0,2.0,3.0,4.0]
# # not included - SCLC and other and no data [ 0,5,6,7,8,9 ]
# dataFrame = dataFrame [ dataFrame.histology_grouped.isin(histToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# #2# use 1,2,3 stages
# # stageToInclude = [1.0,2.0,3.0]
# # dataFrame = dataFrame [ dataFrame.stage.isin(stageToInclude) ]
# # dataFrame = dataFrame.reset_index(drop=True)
# # print ("all patients: " , dataFrame.shape)
# ###### GET TRAINING
# dataFrameTrain = dataFrame [ dataFrame["dataset"].isin(trainList) ]
# #3# type of treatment - use only radio or chemoRadio - use .npy file
# chemoRadio = np.load("rt_chemoRadio.npy").astype(str)
# dataFrameTrain = dataFrameTrain [ dataFrameTrain["patient"].isin(chemoRadio) ]
# #4# (rt only) use all causes of death
# # not implemented
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# # print ("train patients " , dataFrameTrain.shape)
# #### GET VAL
# dataFrameValidate = dataFrame [ dataFrame["dataset"].isin(validateList) ]
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# # print ("validate patients : " , dataFrameValidate.shape)
# ##### GET TEST
# dataFrameTest = dataFrame [ dataFrame["dataset"].isin(testList) ]
# dataFrameTest = dataFrameTest.reset_index(drop=True)
# # print ("test size : " , dataFrameTest.shape)
# # put all, shuffle then reset index
# dataFrame = pd.concat ( [ dataFrameTrain , dataFrameValidate , dataFrameTest ] )
# dataFrame = dataFrame.sample( frac=1 , random_state = 245 ) # this random seed gives ok class balance in training
# dataFrame = dataFrame.reset_index(drop=True)
# # print ("all together : " , dataFrame.shape)
# # split
# dataFrameTrain, dataFrameValidate, dataFrameTest = np.split(dataFrame,
# [int(.75*len(dataFrame)), int(.83*len(dataFrame))])
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# print ( "zeros: " , len( [ x for x in dataFrameTrain.surv2yr.tolist() if x == 0.0 ] ) )
# print ( "ones: " , len( [ x for x in dataFrameTrain.surv2yr.tolist() if x == 1.0 ] ) )
# print ("train patients " , dataFrameTrain.shape)
# #### GET VAL
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# print ( "zeros: " , len( [ x for x in dataFrameValidate.surv2yr.tolist() if x == 0.0 ] ) )
# print ( "ones: " , len( [ x for x in dataFrameValidate.surv2yr.tolist() if x == 1.0 ] ) )
# print ("validate patients : " , dataFrameValidate.shape)
# ##### GET TEST
# dataFrameTest = dataFrameTest.reset_index(drop=True)
# print ( "zeros: " , len( [ x for x in dataFrameTest.surv2yr.tolist() if x == 0.0 ] ) )
# print ( "ones: " , len( [ x for x in dataFrameTest.surv2yr.tolist() if x == 1.0 ] ) )
# print ("test size : " , dataFrameTest.shape)
# return dataFrameTrain, dataFrameValidate,dataFrameTest
# def manageDataFrames():
# trainList = ["nsclc_rt"] # , , , , ,"oncopanel" , "moffitt","moffittSpore" ,"oncomap" , ,"lung3"
# validateList = ["lung2"]
# testList = ["lung1"] # split to val and test
# dataFrame = pd.DataFrame.from_csv('master_170228.csv', index_col = 0)
# dataFrame = dataFrame [
# ( pd.notnull( dataFrame["pathToData"] ) ) &
# ( pd.notnull( dataFrame["pathToMask"] ) ) &
# ( pd.notnull( dataFrame["stackMin"] ) ) &
# ( pd.isnull( dataFrame["patch_failed"] ) ) &
# # ( pd.notnull( dataFrame["surv1yr"] ) ) &
# ( pd.notnull( dataFrame["surv2yr"] ) ) &
# ( pd.notnull( dataFrame["histology_grouped"] ) ) &
# ( pd.notnull( dataFrame["stage"] ) )
# # ( pd.notnull( dataFrame["age"] ) )
# ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### FIX ALL
# #1# clean histology - remove smallcell and other
# # histToInclude - only NSCLC
# histToInclude = [1.0,2.0,3.0,4.0]
# # not included - SCLC and other and no data [ 0,5,6,7,8,9 ]
# dataFrame = dataFrame [ dataFrame.histology_grouped.isin(histToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# # #2# use 1,2,3 stages no 1
# stageToInclude = [1.0,2.0,3.0]
# dataFrame = dataFrame [ dataFrame.stage.isin(stageToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# print ("all patients: " , dataFrame.shape)
# ###### GET TRAINING / VALIDATION
# dataFrameTrain = dataFrame [ dataFrame["dataset"].isin(trainList) ]
# #3# type of treatment - use only radio or chemoRadio - use .npy file
# chemoRadio = np.load("rt_chemoRadio.npy").astype(str)
# dataFrameTrain = dataFrameTrain [ dataFrameTrain["patient"].isin(chemoRadio) ]
# #4# (rt only) use all causes of death
# # not implemented
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# print ("train patients " , dataFrameTrain.shape)
# # Val
# dataFrameValidate = dataFrame [ dataFrame["dataset"].isin(validateList) ]
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# print ("final - val size : " , dataFrameValidate.shape)
# # TEST
# dataFrameTest = dataFrame [ dataFrame["dataset"].isin(testList) ]
# dataFrameTest = dataFrameTest.reset_index(drop=True)
# print ("final - test size : " , dataFrameTest.shape)
# return dataFrameTrain,dataFrameValidate,dataFrameTest
# def manageDataFrames():
# trainList = ["nsclc_rt"] # , , , , ,"oncopanel" , "moffitt","moffittSpore" ,"oncomap" , ,"lung3"
# validateList = ["lung2"] # leave empty
# testList = ["lung1"] # split to val and test
# dataFrame = pd.DataFrame.from_csv('master_170228.csv', index_col = 0)
# dataFrame = dataFrame [
# ( pd.notnull( dataFrame["pathToData"] ) ) &
# ( pd.notnull( dataFrame["pathToMask"] ) ) &
# ( pd.notnull( dataFrame["stackMin"] ) ) &
# ( pd.isnull( dataFrame["patch_failed"] ) ) &
# # ( pd.notnull( dataFrame["surv1yr"] ) ) &
# ( pd.notnull( dataFrame["surv2yr"] ) ) &
# ( pd.notnull( dataFrame["histology_grouped"] ) ) # &
# ( pd.notnull( dataFrame["stage"] ) )
# # ( pd.notnull( dataFrame["age"] ) )
# ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### FIX ALL
# #1# clean histology - remove smallcell and other
# # histToInclude - only NSCLC
# histToInclude = [1.0,2.0,3.0,4.0]
# # not included - SCLC and other and no data [ 0,5,6,7,8,9 ]
# dataFrame = dataFrame [ dataFrame.histology_grouped.isin(histToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# # #2# use 1,2,3 stages no 1
# stageToInclude = [1.0,2.0,3.0]
# dataFrame = dataFrame [ dataFrame.stage.isin(stageToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# print ("all patients: " , dataFrame.shape)
# ###### GET TRAINING / VALIDATION
# dataFrameTrain = dataFrame [ dataFrame["dataset"].isin(trainList) ]
# #3# type of treatment - use only radio or chemoRadio - use .npy file
# chemoRadio = np.load("rt_chemoRadio.npy").astype(str)
# dataFrameTrain = dataFrameTrain [ dataFrameTrain["patient"].isin(chemoRadio) ]
# #4# (rt only) use all causes of death
# # not implemented
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# print ("train patients " , dataFrameTrain.shape)
# dataFrameValidate = dataFrame [ dataFrame["dataset"].isin(validateList) ]
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# print ("validate patients : " , dataFrameValidate.shape)
# #
# # now combine train and val , then split them.
# dataFrameTrainValidate = pd.concat([dataFrameTrain,dataFrameValidate] , ignore_index=False )
# dataFrameTrainValidate = dataFrameTrainValidate.sample( frac=1 , random_state = 42 )
# dataFrameTrainValidate = dataFrameTrainValidate.reset_index(drop=True)
# print ("final - train and validate patients : " , dataFrameTrainValidate.shape)
# thirty = int(dataFrameTrainValidate.shape[0]*0.06) ######################################
# if thirty % 2 != 0:
# thirty = thirty + 1
# # get 0's and 1's.
# zero = dataFrameTrainValidate [ (dataFrameTrainValidate['surv2yr']== 0.0) ]
# one = dataFrameTrainValidate [ (dataFrameTrainValidate['surv2yr']== 1.0) ]
# print ( zero.shape , one.shape )
# # split to train and val
# half = int(thirty/2.0)
# trueList = [True for i in range (half)]
# #
# zeroFalseList = [False for i in range (zero.shape[0] - half )]
# zero_msk = trueList + zeroFalseList
# random.seed(41)
# random.shuffle(zero_msk)
# zero_msk = np.array(zero_msk)
# #
# oneFalseList = [False for i in range (one.shape[0] - half )]
# one_msk = trueList + oneFalseList
# random.seed(41)
# random.shuffle(one_msk)
# one_msk = np.array(one_msk)
# # TRAIN
# zero_train = zero[~zero_msk]
# one_train = one[~one_msk]
# dataFrameTrain = pd.DataFrame()
# dataFrameTrain = dataFrameTrain.append( zero_train ) #.sample( frac=0.73 , random_state = 42 )
# dataFrameTrain = dataFrameTrain.append(one_train)
# dataFrameTrain = dataFrameTrain.sample( frac=1 , random_state = 42 )
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# print ('final - train size:' , dataFrameTrain.shape)
# # VALIDATE
# zero_val = zero[zero_msk]
# one_val = one[one_msk]
# dataFrameValidate = pd.DataFrame()
# dataFrameValidate = dataFrameValidate.append(zero_val)
# dataFrameValidate = dataFrameValidate.append(one_val)
# dataFrameValidate = dataFrameValidate.sample( frac=1 , random_state = 42 )
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# print ('final - validate size:' , dataFrameValidate.shape)
# # TEST
# dataFrameTest = dataFrame [ dataFrame["dataset"].isin(testList) ]
# dataFrameTest = dataFrameTest.reset_index(drop=True)
# print ("final - test size : " , dataFrameTest.shape)
# return dataFrameTrain,dataFrameValidate,dataFrameTest
# used for evaluating performance
def aggregate(logits, mul):
    """Average each consecutive group of `mul` prediction rows into one row.

    Used to fuse the predictions made for `mul` augmented copies of the same
    patient back into a single prediction.  Generalized from the original
    hard-coded 2-column Python loop to any number of classes, computed
    vectorized in NumPy.

    Args:
        logits: array-like of shape (n * mul, num_classes).
        mul: number of consecutive rows belonging to the same patient.

    Returns:
        np.ndarray of shape (n, num_classes) holding the per-group means.

    Raises:
        ValueError: if the row count is not a multiple of `mul`
            (the old loop raised an opaque IndexError instead).
    """
    logits = np.asarray(logits)
    if mul <= 0 or logits.shape[0] % mul != 0:
        raise ValueError(
            "number of rows (%d) is not a positive multiple of mul (%d)"
            % (logits.shape[0], mul))
    # reshape to (groups, mul, classes) and average over the `mul` axis
    return logits.reshape(-1, mul, logits.shape[1]).mean(axis=1)
def getXandY(dataFrame,imgSize):
    """Load cached patch volumes and build labels for every patient row.

    For each row, loads /home/ahmed/data/<dataset>_<patient>.npy as the input
    volume.  The label comes from the module-level global `whatToPredict`
    (survival -> surv2yr, stage -> stage-1, histology -> histology_grouped-1);
    labels are one-hot encoded with the global `NUMCLASSES`.
    NOTE(review): `whatToPredict` and `NUMCLASSES` are not defined in this
    chunk — presumably set elsewhere in the module; confirm before reuse.
    NOTE(review): `imgSize` is accepted but never used here.

    Returns:
        (X float32 array, one-hot y, count of surv2yr==0, count of surv2yr==1)

    Raises:
        Exception: if any surv2yr value is neither 0 nor 1.
    """
    arrList = []
    y = []
    zeros = 0
    ones = 0
    # clincical = []
    for i in range (dataFrame.shape[0]):
        npy = "/home/ahmed/data/" + str(dataFrame.dataset[i]) + "_" + str(dataFrame.patient[i]) + ".npy"
        # npy = "~/data/" + str(dataFrame.dataset[i]) + "_" + str(dataFrame.patient[i]) + ".npy"
        arr = np.load(npy)
        # X #
        arrList.append ( arr )
        # Y # label choice depends on the task selected by `whatToPredict`
        if whatToPredict == "survival":
            y.append ( int(dataFrame.surv2yr[i]) )
        elif whatToPredict == "stage":
            y.append ( int(dataFrame.stage[i]) -1 )
        elif whatToPredict == "histology":
            y.append ( int(dataFrame.histology_grouped[i]) -1 )
        # zeros and ones: survival class balance is counted regardless of task
        if int(dataFrame.surv2yr[i]) == 1:
            ones = ones+1
        elif int(dataFrame.surv2yr[i]) == 0:
            zeros = zeros+1
        else:
            raise Exception("a survival value is not 0 or 1")
        # # now clinical
        # clincicalVector = [ dataFrame.age[i] , dataFrame.stage[i] , dataFrame.histology_grouped[i] ]
        # clincical.extend( [clincicalVector for x in range(1)] )
    # after loop: stack volumes and one-hot encode the labels
    arrList = np.array(arrList, 'float32')
    y = np.array(y, 'int8')
    y = np_utils.to_categorical(y, NUMCLASSES)
    # clincical = np.array(clincical , 'float32' )
    return arrList,y,zeros,ones # ,clincical
def getX(dataFrame,imgSize):
    """Load the cached patch volume of every patient row into one float32 array.

    Reads /home/ahmed/data/<dataset>_<patient>.npy per row; `imgSize` is
    accepted for signature parity with getXandY but is not used here.
    """
    volumes = [
        np.load("/home/ahmed/data/" + str(dataFrame.dataset[i]) + "_" + str(dataFrame.patient[i]) + ".npy")
        for i in range(dataFrame.shape[0])
    ]
    return np.array(volumes, 'float32')
#
#
# `7MMF' `YMM' `7MM"""YMM `7MM"""Mq. db .M"""bgd `7MMM. ,MMF' .g8""8q. `7MM"""Yb. `7MM"""YMM `7MMF'
# MM .M' MM `7 MM `MM. ;MM: ,MI "Y MMMb dPMM .dP' `YM. MM `Yb. MM `7 MM
# MM .d" MM d MM ,M9 ,V^MM. `MMb. M YM ,M MM dM' `MM MM `Mb MM d MM
# MMMMM. MMmmMM MMmmdM9 ,M `MM `YMMNq. M Mb M' MM MM MM MM MM MMmmMM MM
# MM VMA MM Y , MM YM. AbmmmqMA . `MM M YM.P' MM MM. ,MP MM ,MP MM Y , MM ,
# MM `MM. MM ,M MM `Mb. A' VML Mb dM M `YM' MM `Mb. ,dP' MM ,dP' MM ,M MM ,M
# .JMML. MMb..JMMmmmmMMM .JMML. .JMM..AMA. .AMMA.P"Ybmmd" .JML. `' .JMML. `"bmmd"' .JMMmmmdP' .JMMmmmmMMM .JMMmmmmMMM
#
#
# not used
def makeClinicalModel():
    """Build a minimal Keras branch for the 3 clinical inputs.

    A single Dense(3) layer taking histology, stage and age; headless —
    intended to be merged with the imaging branches by the caller.
    """
    model = Sequential()
    # just histology, stage and age
    model.add(Dense( 3, input_dim=(3)) ) # 512
    return model
def make2dConvModel(imgSize,regul):
    """Build the 2-D CNN branch for a single (imgSize, imgSize, 1) slice.

    Architecture: Conv48(7x7) -> Conv96(5x5)+pool -> Conv192(3x3) ->
    Conv256(3x3)+strided pool -> Dense(512), each conv followed by
    BatchNorm + LeakyReLU, with Dropout(0.5) after the Dense.
    Headless: no classifier layer — presumably appended by the caller
    after merging branches (TODO confirm).  `regul` is passed through as
    activity_regularizer to every weighted layer; `LRELUalpha` is a
    module-level global not defined in this chunk.
    """
    # regul - norm - act
    #(samples, rows, cols, channels) if dim_ordering='tf'.
    model = Sequential()
    model.add(Convolution2D(48, 7, 7, border_mode='valid', dim_ordering='tf', input_shape=[imgSize,imgSize,1] , activity_regularizer = regul )) # 32
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    # model.add(MaxPooling2D(pool_size=(3, 3) ))
    model.add(Convolution2D(96, 5, 5 , border_mode='valid', activity_regularizer = regul )) # 32
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(MaxPooling2D(pool_size=(3, 3) ))
    # model.add(Convolution2D(192, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 64
    # model.add(BatchNormalization())
    # model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(Convolution2D(192, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 64
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(Convolution2D(256, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 64
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(MaxPooling2D(pool_size=(3, 3), strides=(2,2) ))
    # model.add(BatchNormalization())
    # model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    # model.add(MaxPooling2D(pool_size=(3, 3)))
    # model.add(Dropout(0.25))
    # # # this chucnk added - 14
    # model.add(Convolution2D(256, 3, 3, border_mode='valid' , activity_regularizer = regul )) # 64
    # model.add(BatchNormalization())
    # model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    # model.add(Convolution2D(256, 3, 3, border_mode='valid' , activity_regularizer = regul )) # 64
    # model.add(BatchNormalization())
    # model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    # model.add(MaxPooling2D(pool_size=(3, 3)))
    # model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(512 , activity_regularizer = regul )) # 512
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(Dropout(0.5))
    return model
# def make3dConvModel(imgSize,count,fork,skip,regul):
# #(samples, rows, cols, channels) if dim_ordering='tf'.
# convDrop = 0.25
# model = Sequential()
# if fork:
# model.add(Convolution3D(64, 1, 3, 3, border_mode='valid',dim_ordering='tf',input_shape=[count*2+1,imgSize,imgSize,1] , activity_regularizer = regul)) # 32
# else:
# model.add(Convolution3D(64, 1, 3, 3, border_mode='valid',dim_ordering='tf',input_shape=[imgSize/skip,imgSize/skip,imgSize/skip,1] , activity_regularizer = regul )) # 32
# # model.add(Convolution3D(48, 5, 5, 5, border_mode='valid',dim_ordering='tf',input_shape=[count*2+1,imgSize,imgSize,1] , activity_regularizer = regul)) # 32
# model.add(BatchNormalization())
# model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
# model.add(Dropout(convDrop))
# model.add(Convolution3D(96, 1, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 32
# model.add(BatchNormalization())
# model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
# model.add(MaxPooling3D(pool_size=(1, 3, 3 ))) ###
# model.add(Dropout(convDrop))
# model.add(Convolution3D(192, 1, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 32
# model.add(BatchNormalization())
# model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
# model.add(Dropout(convDrop))
# model.add(Convolution3D(384, 1, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 32
# model.add(BatchNormalization())
# model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
# model.add(MaxPooling3D(pool_size=(1, 3, 3 ))) ###
# model.add(Dropout(convDrop))
# model.add(Flatten())
# model.add(Dense(512 , activity_regularizer = regul )) # 512
# model.add(BatchNormalization())
# model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
# model.add(Dropout(0.5))
# return model
def make3dConvModel(imgSize,count,fork,skip,regul):
    """Build the 3-D CNN branch over a volumetric patch.

    When `fork` is truthy the input is a (count*2+1, imgSize, imgSize, 1)
    slab (one branch per anatomical orientation); otherwise it is a cubic
    (imgSize/skip)^3 volume.  Architecture: Conv64(5^3) -> Conv128(3^3)+pool
    -> Conv256(3^3) -> Conv512(3^3)+pool -> Dense(512), each weighted layer
    followed by BatchNorm + LeakyReLU and Dropout.  Headless: no classifier
    layer — presumably appended by the caller (TODO confirm).  `LRELUalpha`
    is a module-level global not defined in this chunk.
    """
    #(samples, rows, cols, channels) if dim_ordering='tf'.
    convDrop = 0.25  # dropout rate used after every conv stage
    model = Sequential()
    if fork:
        model.add(Convolution3D(64, 5, 5, 5, border_mode='valid',dim_ordering='tf',input_shape=[count*2+1,imgSize,imgSize,1] , activity_regularizer = regul)) # 32
    else:
        model.add(Convolution3D(64, 5, 5, 5, border_mode='valid',dim_ordering='tf',input_shape=[imgSize/skip,imgSize/skip,imgSize/skip,1] , activity_regularizer = regul )) # 32
    # model.add(Convolution3D(48, 5, 5, 5, border_mode='valid',dim_ordering='tf',input_shape=[count*2+1,imgSize,imgSize,1] , activity_regularizer = regul)) # 32
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(Dropout(convDrop))
    model.add(Convolution3D(128, 3, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 32
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(MaxPooling3D(pool_size=(3, 3, 3 ))) ###
    model.add(Dropout(convDrop))
    model.add(Convolution3D(256, 3, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 32
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(Dropout(convDrop))
    model.add(Convolution3D(512, 3, 3, 3 , border_mode='valid' , activity_regularizer = regul )) # 32
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(MaxPooling3D(pool_size=(3, 3, 3 ))) ###
    model.add(Dropout(convDrop))
    model.add(Flatten())
    model.add(Dense(512 , activity_regularizer = regul )) # 512
    model.add(BatchNormalization())
    model.add(advanced_activations.LeakyReLU(alpha=LRELUalpha))
    model.add(Dropout(0.5))
    return model
#
#
# `7MMF' `YMM' `7MM"""YMM `7MM"""Mq. db .M"""bgd `7MMF' `7MMF'`7MMF' .M"""bgd MMP""MM""YMM
# MM .M' MM `7 MM `MM. ;MM: ,MI "Y MM MM MM ,MI "Y P' MM `7
# MM .d" MM d MM ,M9 ,V^MM. `MMb. MM MM MM `MMb. MM
# MMMMM. MMmmMM MMmmdM9 ,M `MM `YMMNq. MMmmmmmmMM MM `YMMNq. MM
# MM VMA MM Y , MM YM. AbmmmqMA . `MM MM MM MM . `MM MM
# MM `MM. MM ,M MM `Mb. A' VML Mb dM MM MM MM Mb dM MM
# .JMML. MMb..JMMmmmmMMM .JMML. .JMM..AMA. .AMMA.P"Ybmmd" .JMML. .JMML..JMML.P"Ybmmd" .JMML.
#
#
# new normalization method
# get mean and std from training
def centerAndNormalize(arr):
    """Linearly rescale CT Hounsfield values from [-1024, 3071] into [0, 1].

    -1024 maps to 0 and 3071 maps to 1; the input array is not modified.
    """
    hu_floor = -1024.0
    hu_span = 3071.0 + 1024.0  # width of the usual 12-bit HU window
    return (arr - hu_floor) / hu_span
# get mean and std from training
def centerAndStandardizeTraining(arr):
    """Fit and apply per-position standardization on the training array.

    Computes the element-wise mean and std over axis 0 (across samples) and
    returns them together with the standardized array.  Computed
    out-of-place: the previous in-place `-=` / `/=` version silently
    mutated the caller's array, which also corrupted any other reference
    to the same data.

    Args:
        arr: array of shape (n_samples, ...).

    Returns:
        (mean, std, standardized) where standardized = (arr - mean) / (std + eps).
    """
    mean = np.mean(arr, axis=0)
    std = np.std(arr, axis=0)
    # eps guards against division by zero for constant positions
    out = (arr - mean) / (std + np.finfo(float).eps)
    return mean, std, out
# apply mean and std to val and test
def centerAndStandardizeValTest(arr, mean, std):
    """Apply training-set mean/std standardization to a val/test array.

    Computed out-of-place: the previous in-place `-=` / `/=` version
    silently mutated the caller's array.

    Args:
        arr: array to standardize.
        mean, std: statistics returned by centerAndStandardizeTraining.

    Returns:
        (arr - mean) / (std + eps) as a new array.
    """
    # eps guards against division by zero for constant positions
    return (arr - mean) / (std + np.finfo(float).eps)
def AUC(test_labels, test_prediction, nb):
    """Per-class ROC AUC for one-hot labels and predicted probabilities.

    Follows the scikit-learn one-vs-rest ROC example: for each of the `nb`
    classes, an ROC curve is built from that class's label/score column and
    its area is rounded to 3 decimals.

    Returns:
        list of `nb` rounded AUC values, one per class column.
    """
    class_aucs = []
    for cls in range(nb):
        # ( actual labels, predicted probabilities ) per class column
        fpr, tpr, _ = roc_curve(test_labels[:, cls], test_prediction[:, cls])
        class_aucs.append(round(auc(fpr, tpr), 3))
    return class_aucs
def AUCalt(test_labels, test_prediction):
    """Binary ROC AUC from categorical (one-hot) labels and softmax outputs.

    Takes column 1 of the predictions as the positive-class score and
    derives the binary ground truth from whether label column 0 is set.
    """
    pos_scores = np.array([row[1] for row in test_prediction])
    true_binary = np.array([0 if row[0] == 1 else 1 for row in test_labels])
    fpr, tpr, thresholds = roc_curve(true_binary, pos_scores, pos_label=1)
    return auc(fpr, tpr)
class Histories(keras.callbacks.Callback):
    """Keras callback that scores the validation set per epoch and checkpoints.

    On train start it loads/preprocesses the validation cohort; on each epoch
    end it predicts per patient, computes per-class AUC, saves the model when
    AUC beats all previous epochs (after epoch 10), and appends loss/AUC/logit
    histories to .npy files under /home/ahmed/output/.
    NOTE(review): relies on module-level globals (RUN, fork, mode, imgSize,
    count, skip, finalSize, NUMCLASSES) not defined in this chunk — confirm
    they are set before training starts.
    """
    def on_train_begin(self, logs={}):
        """Load + normalize the validation set once and cache it on self."""
        self.train_loss = []
        self.auc = []
        self.logits = []
        self.val_loss = []
        # save json representation
        model_json = self.model.to_json()
        with open("/home/ahmed/output/" + RUN + "_json.json", "w") as json_file:
            json_file.write(model_json)
        dataFrameTrain,dataFrameValidate,dataFrameTest= manageDataFrames()
        #
        x_val,y_val,zeros,ones = getXandY(dataFrameValidate,imgSize)
        print ("validation data:" , x_val.shape, y_val.shape , zeros , ones )
        self.dataFrameValidate = dataFrameValidate
        self.y_val = y_val
        # lets do featurewiseCenterAndStd - its still a cube at this point
        # x_val_cs = centerAndStandardizeValTest(x_val,mean,std)
        x_val_cs = centerAndNormalize(x_val)
        # x_val_cs = x_val
        if fork:
            # lets get the 3 orientations
            self.x_val_a,self.x_val_s,self.x_val_c = krs.splitValTest(x_val_cs,finalSize,imgSize,count,mode,fork,skip)
            print ("final val data:" , self.x_val_a.shape,self.x_val_s.shape,self.x_val_c.shape)
        else:
            # lets get one only
            self.x_val = krs.splitValTest(x_val_cs,finalSize,imgSize,count,mode,fork,skip)
            print ("final val data:" , x_val.shape)
        return
    def on_train_end(self, logs={}):
        return
    def on_epoch_begin(self, epoch, logs={}):
        return
    def on_epoch_end(self, epoch, logs={}):
        """Predict each validation patient, track AUC, checkpoint best model."""
        # # val_loss__ = self.model.test_on_batch ( [ self.x_val ] , self.y_val )[0]
        # val_loss_ = self.model.evaluate ( [ self.x_val ] , self.y_val , batch_size = self.dataFrameValidate.shape[0] )[0]
        # if epoch > 300:
        #     if all(val_loss_< i for i in self.val_loss):
        #         self.model.save_weights("/home/ahmed/output/" + RUN + "_model.h5")
        #         print("Saved model to disk")
        #         # save model and json representation
        #         model_json = self.model.to_json()
        #         with open("/home/ahmed/output/" + RUN + "_json.json", "w") as json_file:
        #             json_file.write(model_json)
        # # append and save train loss
        # self.train_loss.append(logs.get('loss'))
        # np.save( "/home/ahmed/output/" + RUN + "_train_loss.npy", self.train_loss)
        # # append and save train loss
        # self.val_loss.append(val_loss_)
        # np.save( "/home/ahmed/output/" + RUN + "_val_loss.npy", self.val_loss)
        # print ( "val loss: " , val_loss_ )
        # print ( "val loss: " , val_loss_ )
        # logits = self.model.predict ( [ self.x_val ] )
        # print ( "\npredicted val zeros: " , len( [ x for x in logits if x[0] > x[1] ] ) )
        # print ( "predicted val ones: " , len( [ x for x in logits if x[0] < x[1] ] ) )
        # logits = np.array(logits)
        # print ("logits: " , logits.shape , logits[0] )
        # auc1 , auc2 = AUC( self.y_val , logits )
        # print ("\nauc1: " , auc1 , " auc2: " , auc2)
        # print ("wtf2")
        # # append and save auc
        # self.auc.append(auc1)
        # np.save( "/home/ahmed/output/" + RUN + "_auc.npy", self.auc)
        # # append and save logits
        # self.logits.append(logits)
        # np.save( "/home/ahmed/output/" + RUN + "_logits.npy", self.logits)
        ###############################################################################################################
        # predict one patient at a time; input reshape depends on fork/mode
        logits = []
        for i in range (self.dataFrameValidate.shape[0]):
            if fork:
                if mode == "3d":
                    # get predictions
                    y_pred = self.model.predict_on_batch ( [ self.x_val_a[i].reshape(1,count*2+1,imgSize,imgSize,1) ,
                                                    self.x_val_s[i].reshape(1,count*2+1,imgSize,imgSize,1) ,
                                                    self.x_val_c[i].reshape(1,count*2+1,imgSize,imgSize,1) ] )
                elif mode == "2d":
                    # get predictions
                    y_pred = self.model.predict_on_batch ( [ self.x_val_a[i].reshape(1,imgSize,imgSize,1) ,
                                                    self.x_val_s[i].reshape(1,imgSize,imgSize,1) ,
                                                    self.x_val_c[i].reshape(1,imgSize,imgSize,1) ] )
            else:
                if mode == "3d":
                    # get predictions
                    dim = int ( imgSize/( 1.0* skip) )
                    y_pred = self.model.predict_on_batch ( [ self.x_val[i].reshape(1,dim,dim,dim,1) ] )
                    # y_pred = self.model.predict_on_batch ( [ self.x_val[i].reshape(1,count*2+1,imgSize,imgSize,1) ] )
                elif mode == "2d":
                    # get predictions
                    y_pred = self.model.predict_on_batch ( [ self.x_val[i].reshape(1,imgSize,imgSize,1) ] )
            # now after down with switching
            logits.append( y_pred[0] )
        print ( "\npredicted val zeros: " , len( [ x for x in logits if x[0] > x[1] ] ) )
        print ( "predicted val ones: " , len( [ x for x in logits if x[0] < x[1] ] ) )
        logits = np.array(logits)
        print ("logits: " , logits.shape , logits[0] )
        aucs = AUC( self.y_val , logits , NUMCLASSES )
        print ("\naucs: " , aucs)
        print ("wtf2")
        # # before appending, check if this auc is the highest in all the list, if yes save the h5 model
        #
        if epoch > 10:
            if all(aucs[0]>i for i in self.auc):
                self.model.save_weights("/home/ahmed/output/" + RUN + "_model.h5")
                print("Saved model to disk")
                # save model and json representation
                model_json = self.model.to_json()
                with open("/home/ahmed/output/" + RUN + "_json.json", "w") as json_file:
                    json_file.write(model_json)
        # append and save train loss
        self.train_loss.append(logs.get('loss'))
        np.save( "/home/ahmed/output/" + RUN + "_train_loss.npy", self.train_loss)
        # append and save auc
        self.auc.append(aucs[0])
        np.save( "/home/ahmed/output/" + RUN + "_auc.npy", self.auc)
        # append and save logits
        self.logits.append(logits)
        np.save( "/home/ahmed/output/" + RUN + "_logits.npy", self.logits)
        return
    def on_batch_begin(self, batch, logs={}):
        return
    def on_batch_end(self, batch, logs={}):
        return
#
#
# `7MMF' `YMM' `7MM"""YMM `7MM"""YMM `7MM"""Mq.
# MM .M' MM `7 MM `7 MM `MM.
# MM .d" MM d MM d MM ,M9
# MMMMM. MMmmMM MMmmMM MMmmdM9
# MM VMA MM Y , MM Y , MM
# MM `MM. MM ,M MM ,M MM
# .JMML. MMb..JMMmmmmMMM .JMMmmmmMMM .JMML.
#
#
# define funcs
# if fork:
# # (0 = test, 1 = train)
# axialFunc = K.function([ self.model.layers[0].layers[0].layers[0].input , K.learning_phase() ],
# [ self.model.layers[0].layers[0].layers[-1].output ] )
# sagittalFunc = K.function([ self.model.layers[0].layers[1].layers[0].input , K.learning_phase() ],
# [ self.model.layers[0].layers[1].layers[-1].output ] )
# coronalFunc = K.function([ self.model.layers[0].layers[2].layers[0].input , K.learning_phase() ],
# [ self.model.layers[0].layers[2].layers[-1].output ] )
# mergeFunc = K.function([ self.model.layers[1].input , K.learning_phase() ],
# [ self.model.layers[2].output ] )
# softmaxFunc = K.function([ self.model.layers[3].input , K.learning_phase() ],
# [ self.model.layers[3].output ] )
# else:
# print("no fork - not tested")
# use funcs
# if mode == "2d":
# # get the different ones
# axial512 = axialFunc( [ self.x_val_a[i].reshape(1,imgSize,imgSize,1) , 0 ] )
# sagittal512 = sagittalFunc( [ self.x_val_s[i].reshape(1,imgSize,imgSize,1) , 0 ] )
# coronal512 = coronalFunc( [ self.x_val_c[i].reshape(1,imgSize,imgSize,1) , 0 ] )
# if mode == "3d":
# axial512 = axialFunc( [ self.x_val_a[i].reshape(1,count*2+1,imgSize,imgSize,1) , 0 ] )
# sagittal512 = sagittalFunc( [ self.x_val_s[i].reshape(1,count*2+1,imgSize,imgSize,1) , 0 ] )
# coronal512 = coronalFunc( [ self.x_val_c[i].reshape(1,count*2+1,imgSize,imgSize,1) , 0 ] )
# # concat them
# concat = []
# concat.extend ( axial512[0][0].tolist() )
# concat.extend ( sagittal512[0][0].tolist() )
# concat.extend ( coronal512[0][0].tolist() )
# #
# concat = np.array(concat ,'float32').reshape(1,len(concat))
# # now do one last function
# preds = mergeFunc( [ concat , 0 ])
# #
# logitsBal = np.array( [ preds[0][0][0] , preds[0][0][1] ] ) .reshape(1,2) # * zeroWeight - * oneWeight
# logits.append( softmaxFunc( [ logitsBal , 0 ]) [0].reshape(2) )
# run 54
# def manageDataFramesLung1():
# trainList = ["lung2"]
# testList = ["nsclc_rt"] # or lung1 # needs fixing
# dataFrame = pd.DataFrame.from_csv('master_170228.csv', index_col = 0)
# dataFrame = dataFrame [
# ( pd.notnull( dataFrame["pathToData"] ) ) &
# ( pd.notnull( dataFrame["pathToMask"] ) ) &
# ( pd.notnull( dataFrame["stackMin"] ) ) &
# ( pd.isnull( dataFrame["patch_failed"] ) ) &
# # ( pd.notnull( dataFrame["surv1yr"] ) ) &
# ( pd.notnull( dataFrame["surv2yr"] ) ) &
# ( pd.notnull( dataFrame["histology_grouped"] ) ) &
# ( pd.notnull( dataFrame["stage"] ) )
# # ( pd.notnull( dataFrame["age"] ) )
# ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### FIX ALL
# #1# clean histology - remove smallcell and other
# # histToInclude - only NSCLC
# histToInclude = [1.0,2.0,3.0,4.0]
# # not included - SCLC and other and no data [ 0,5,6,7,8,9 ]
# dataFrame = dataFrame [ dataFrame.histology_grouped.isin(histToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# #2# use 1,2,3 stages
# stageToInclude = [1.0,2.0,3.0]
# dataFrame = dataFrame [ dataFrame.stage.isin(stageToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### GET TRAINING
# dataFrameTrain = dataFrame [ dataFrame["dataset"].isin(trainList) ]
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# # now split into training and validation
# dataFrameTrain = dataFrameTrain.sample( frac=1 , random_state = 1 )
# dataFrameTrain, dataFrameValidate = np.split(dataFrameTrain,[ int(.80*len(dataFrameTrain)) ])
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# ##### GET TEST
# # for RT
# dataFrameTest = dataFrame [ dataFrame["dataset"].isin(testList) ]
# #3# type of treatment - use only radio or chemoRadio - use .npy file
# chemoRadio = np.load("rt_chemoRadio.npy").astype(str)
# dataFrameTest = dataFrameTest [ dataFrameTest["patient"].isin(chemoRadio) ]
# #4# (rt only) use all causes of death
# # not implemented
# dataFrameTest = dataFrameTest.reset_index(drop=True)
# # for lung2
# # dataFrameTest = dataFrame [ dataFrame["dataset"].isin(testList) ]
# # dataFrameTest = dataFrameTest.reset_index(drop=True)
# print ("train patients " , dataFrameTrain.shape)
# print ("validate patients : " , dataFrameValidate.shape)
# print ("test size : " , dataFrameTest.shape)
# return dataFrameTrain, dataFrameValidate,dataFrameTest
# run 51
# def manageDataFramesRTn1():
# trainList = ["nsclc_rt"]
# testList = ["lung2"] # or lung2
# dataFrame = pd.DataFrame.from_csv('master_170228.csv', index_col = 0)
# dataFrame = dataFrame [
# ( pd.notnull( dataFrame["pathToData"] ) ) &
# ( pd.notnull( dataFrame["pathToMask"] ) ) &
# ( pd.notnull( dataFrame["stackMin"] ) ) &
# ( pd.isnull( dataFrame["patch_failed"] ) ) &
# # ( pd.notnull( dataFrame["surv1yr"] ) ) &
# ( pd.notnull( dataFrame["surv2yr"] ) ) &
# ( pd.notnull( dataFrame["histology_grouped"] ) ) &
# ( pd.notnull( dataFrame["stage"] ) )
# # ( pd.notnull( dataFrame["age"] ) )
# ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### FIX ALL
# #1# clean histology - remove smallcell and other
# # histToInclude - only NSCLC
# histToInclude = [1.0,2.0,3.0,4.0]
# # not included - SCLC and other and no data [ 0,5,6,7,8,9 ]
# dataFrame = dataFrame [ dataFrame.histology_grouped.isin(histToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# #2# use 1,2,3 stages
# stageToInclude = [1.0,2.0,3.0]
# dataFrame = dataFrame [ dataFrame.stage.isin(stageToInclude) ]
# dataFrame = dataFrame.reset_index(drop=True)
# ###### GET TRAINING
# dataFrameTrain = dataFrame [ dataFrame["dataset"].isin(trainList) ]
# #3# type of treatment - use only radio or chemoRadio - use .npy file
# chemoRadio = np.load("rt_chemoRadio.npy").astype(str)
# dataFrameTrain = dataFrameTrain [ dataFrameTrain["patient"].isin(chemoRadio) ]
# #4# (rt only) use all causes of death
# # not implemented
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# # now split into training and validation
# dataFrameTrain = dataFrameTrain.sample( frac=1 , random_state = 1 )
# dataFrameTrain, dataFrameValidate = np.split(dataFrameTrain,[ int(.87*len(dataFrameTrain)) ])
# dataFrameTrain = dataFrameTrain.reset_index(drop=True)
# dataFrameValidate = dataFrameValidate.reset_index(drop=True)
# ##### GET TEST
# dataFrameTest = dataFrame [ dataFrame["dataset"].isin(testList) ]
# dataFrameTest = dataFrameTest.reset_index(drop=True)
# print ("train patients " , dataFrameTrain.shape)
# print ("validate patients : " , dataFrameValidate.shape)
# print ("test size : " , dataFrameTest.shape)
# return dataFrameTrain, dataFrameValidate,dataFrameTest | [
"i.ahmedhosny@gmail.com"
] | i.ahmedhosny@gmail.com |
38fa1bdcf32d9a41324dc89afe6c727eb8ccee83 | 7c7e998d59511c752061005cddc921833ae6372d | /bot.py | 9ad445cb1a03d2736110465c709c3b1e07ff068d | [] | no_license | 999WRLD999/ECO.bot-v1 | 1d53a8c2b05fbbf4d848cf14c7986216c1c1c7d1 | e0e760b9beab81a6dfb0e4d5f67067d4a5ac05cc | refs/heads/main | 2023-03-21T19:34:27.142517 | 2021-03-14T22:31:04 | 2021-03-14T22:31:04 | 347,770,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,449 | py | import discord
import requests
import json
import discord.utils
import random
import string
import asyncio
from discord.ext import commands
Version = 'v1.0.2.7 - Alpha.4'  # shown in every embed footer/description
bot = commands.Bot(command_prefix='$')
bot.remove_command("help")  # the default help is replaced by the custom $help below
# NOTE(review): this module-level snapshot is re-read inside every command,
# so this initial load only serves commands that forget to reload it.
with open('usercash.json', 'r') as f:
    usercashjson = json.load(f)
def check(author):
    """Build a predicate accepting only messages sent by *author*.

    Intended for use as the ``check=`` argument of ``bot.wait_for``.
    """
    # Closure over the expected author; returns a bool per candidate message.
    return lambda message: message.author == author
@bot.command()
@commands.cooldown(2, 10, commands.BucketType.user)
async def work(ctx):
    """Pay the invoker a random 3-digit wage (at most twice per 10s).

    Fix: balances are stored nested as ``usercashjson[id][id]`` (see $start),
    but the original added the wage to the per-user dict itself, which raised
    ``TypeError``.  The leftover debug ``print`` was also removed.
    """
    authorid = str(ctx.author.id)
    cashearned = ''.join(random.choice(string.digits) for i in range(3))
    await ctx.send(f"Goodjob, you earned ${cashearned} at work today")
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    usercashjson[authorid][authorid] += int(cashearned)
    with open('usercash.json', 'w') as f:
        json.dump(usercashjson, f, indent=4)
@bot.command()
@commands.cooldown(1, 20, commands.BucketType.user)
async def mine(ctx):
    """Mine rubies (and, for higher tiers, bonus minerals) with the best pickaxe role.

    Fix: the original mixed two ``if``/``elif`` chains, so rusty/steel/golden
    owners also fell into the bare-hands ``else`` branch and were rewarded (and
    messaged) twice per invocation.  Tiers are now mutually exclusive, best
    pickaxe first.
    """
    rustypickaxe = discord.utils.get(ctx.author.guild.roles, name="Rusty Pickaxe")
    steelpickaxe = discord.utils.get(ctx.author.guild.roles, name="Steel Pickaxe")
    goldenpickaxe1 = discord.utils.get(ctx.author.guild.roles, name="Golden Pickaxe")
    magmaritepickaxe = discord.utils.get(ctx.author.guild.roles, name="Magmarite Pickaxe")
    with open('items.json', 'r') as f:
        itemjson = json.load(f)
    with open("mineraldata.json", 'r') as f:
        mineraljson = json.load(f)
    author = str(ctx.author.id)
    if magmaritepickaxe in ctx.author.roles:
        randommineral_magmarpick = random.choice(['magmarite', "saphire", "hellian", "alumanite"])
        await ctx.send(f"You're `Magmarite Pickaxe` mined fifty rubies and six {randommineral_magmarpick}")
        mineraljson[author][0][randommineral_magmarpick] += 6
        itemjson[author][0]['rubies'] += 50
        with open('mineraldata.json', 'w') as f:
            json.dump(mineraljson, f, indent=4)
    elif goldenpickaxe1 in ctx.author.roles:
        randommineral_goldpick = random.choice(['magmarite', "saphire", "alumanite"])
        await ctx.send(f"You're `Golden Pickaxe` mined thirteen rubies and 3 {randommineral_goldpick}")
        mineraljson[author][0][randommineral_goldpick] += 3
        itemjson[author][0]['rubies'] += 13
        with open('mineraldata.json', 'w') as f:
            json.dump(mineraljson, f, indent=4)
    elif steelpickaxe in ctx.author.roles:
        await ctx.send("You're `Steel Pickaxe` mined six rubies! ")
        itemjson[author][0]['rubies'] += 6
    elif rustypickaxe in ctx.author.roles:
        await ctx.send("You're `Rusty Pickaxe` mined three rubies! ")
        itemjson[author][0]['rubies'] += 3
    else:
        await ctx.send("You just mined one ruby! ")
        itemjson[author][0]['rubies'] += 1
    # items.json is always touched (every branch adds rubies), so write once.
    with open('items.json', 'w') as f:
        json.dump(itemjson, f, indent=4)
@mine.error
async def clear_error(ctx, error):
    # Error hook for $mine.
    # NOTE(review): MissingRole is handled here, but $mine carries no role
    # check decorator, so this branch looks unreachable — confirm whether a
    # commands.has_role() check was removed from $mine.
    if isinstance(error, commands.MissingRole):
        await ctx.send("You are missing the `pickaxe` role; you can buy it in the shop!")
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, you're on cooldown! Chill out before you overwork yourself.")
@work.error
async def clear_error(ctx, error):
    """Tell the user to wait when $work is invoked while on cooldown."""
    # Only the cooldown error gets a reply; everything else is ignored,
    # matching the original handler's behavior.
    if not isinstance(error, commands.CommandOnCooldown):
        return
    await ctx.send("Whoa, you're on cooldown! Chill out before you overwork yourself.")
@bot.command()
async def sellrubies(ctx):
    """Sell every ruby in the invoker's inventory at $250 apiece.

    Fixes: the payout was added to the per-user dict instead of the nested
    balance slot (``usercashjson[id][id]``, see $start) which raised
    ``TypeError``, and both JSON files were re-read/re-written once per ruby;
    the sale is now a single computation with one read/write each.
    """
    authorid = str(ctx.author.id)
    with open("items.json", 'r') as f:
        itemjson = json.load(f)
    rubies = itemjson[authorid][0]["rubies"]
    if rubies >= 1:
        await ctx.send(f"Sold: {rubies} Rubies")
        with open('usercash.json', 'r') as f:
            usercashjson = json.load(f)
        itemjson[authorid][0]["rubies"] = 0
        usercashjson[authorid][authorid] += 250 * rubies
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
        with open('items.json', 'w') as f:
            json.dump(itemjson, f, indent=4)
    else:
        await ctx.send("You don't have any `rubies` to sell! Do $mine to collect them!")
@bot.command()
async def bal(ctx):
    """Show the invoker's cash balance in an embed."""
    authorid = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    # Balances are stored nested: usercashjson[user_id][user_id] (see $start).
    embed = discord.Embed(title=f'{ctx.author}\'s Balance', description=f"**Version: {Version}**", color=0x00ff00)
    embed.add_field(name="Balance", value=f"${usercashjson[str(ctx.author.id)][authorid]}")
    await ctx.send(embed=embed)
@bot.command()
async def shop(ctx):
    """Display the item shop as an embed, then explain how to buy."""
    # Item listings kept as data so prices live in one place.
    listings = [
        ("pickaxe", "$5,000"),
        ("Gun", "$5,000"),
        ("Weedfarm Business", "$50,000"),
        ("Meth Lab", "$100,000"),
        ("Methlab Trailer", "$2,500"),
        ("King Monke", "$3,000,000"),
    ]
    embed = discord.Embed(title=f"Shop", description=f"**Version: {Version}**", color=0x00ff00)
    for item_name, price in listings:
        embed.add_field(name=item_name, value=price, inline=False)
    await ctx.send(embed=embed)
    await ctx.send("To purchase an item, do $buy(item) all in one word, no capitals.")
@bot.command()
async def buygun(ctx):
    """Buy a gun for $5,000 (protects against $crime arrests).

    Fixes: the original declared undefined globals (``usercash``/``gun``),
    subtracted 5000 from the wrong level of the nested balance dict twice
    (raising ``TypeError``), discarded one of the loads, and ended with a
    ``NameError`` on ``gun += 1``.  The buyer is now charged exactly once.
    """
    author = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    if usercashjson[author][author] >= 5000:
        with open('items.json', 'r') as f:
            itemsjson = json.load(f)
        itemsjson[author][0]["gun"] += 1
        usercashjson[author][author] -= 5000
        with open('items.json', 'w') as f:
            json.dump(itemsjson, f, indent=4)
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
        await ctx.send("Purchased `gun`, this will protect you against robbers and attackers!")
    else:
        await ctx.send("Sorry, you do not have enough money to purchase this item.")
@bot.command()
async def buyweedfarm(ctx):
    """Buy a weed farm for $50,000 (one per user).

    Fix: the stats embed referenced an undefined global ``cpc`` (NameError on
    every successful purchase); it now reads the buyer's stored cash-per-cycle
    from items.json.  The unused ``global weedfarm`` was removed.
    """
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    authorid = str(ctx.author.id)
    if usercashjson[authorid][authorid] >= 50000:
        if itemsjson[authorid][0]["weedfarm"] == 1:
            await ctx.send("Sorry, you already have a weed farm.")
        else:
            usercashjson[authorid][authorid] -= 50000
            itemsjson[authorid][0]["weedfarm"] += 1
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
            with open('items.json', 'w') as f:
                json.dump(itemsjson, f, indent=4)
            await ctx.send("You just purchased your very first `weed farm` here you can produce weed, do it manually or purchase workers to produce weed for you!")
            embed = discord.Embed(title="Weed Farm Statistics", description=f"**Version: {Version}**", color=0x00ff00)
            embed.add_field(name=f"Worker Statistics", value=f"Worker Amount: NULL")
            embed.add_field(name=f"Cash Per Cycle Statistics", value=f"Producing: ${itemsjson[authorid][0]['cpc']}")
            await ctx.send(embed=embed)
            await ctx.send("""
    ``` Commands:
    $weedfarmstats - Provides the statistics of your weed farm
    $buyweedfarmworker - Purchases a worker for your weed farm
    $collectcashweedfarm - Collects the cash from your weed farm 
    $weedfarm_help
    ```
    """)
    else:
        await ctx.send("Pfft, you need more cash to buy this! It costs $50000 to open up a weed farm.")
@bot.command()
async def weedfarmstats(ctx):
    """Show the invoker's weed-farm worker / cash-per-cycle stats."""
    global cpc  # NOTE(review): never assigned here — looks like dead leftover state.
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)  # NOTE(review): loaded but unused in this command
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    # Only users who own a farm get the embed; non-owners receive no reply at all.
    if itemsjson[str(ctx.author.id)][0]["weedfarm"] >= 1:
        embed = discord.Embed(title="Weed Farm Statistics", description=f"**Version: {Version}**", color=0x00ff00)
        embed.add_field(name=f"Worker Statistics", value=f"Worker Amount: NULL")
        embed.add_field(name=f"Cash Per Cycle Statistics", value=f"Producing: ${itemsjson[str(ctx.author.id)][0]['cpc']}")
        await ctx.send(embed=embed)
@bot.command()
async def buyweedfarmworker(ctx):
    """Buy one farm worker for $1,000 (adds $125 cash-per-cycle).

    Fix: the original silently did nothing when the buyer could not afford a
    worker; the failure is now reported.  The redundant items.json re-read
    after writing was dropped.
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    if usercashjson[author][author] >= 1000:
        itemsjson[author][0]['cpc'] += 125
        usercashjson[author][author] -= 1000
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
        with open('items.json', 'w') as f:
            json.dump(itemsjson, f, indent=4)
        await ctx.send("Purchased 1 Worker")
        embed = discord.Embed(title="Weed Farm Statistics", description=f"**Version: {Version}**", color=0x00ff00)
        embed.add_field(name=f"Worker Statistics", value=f"Worker Amount: NULL")
        embed.add_field(name=f"Cash Per Cycle Statistics", value=f"Producing: ${itemsjson[author][0]['cpc']}")
        await ctx.send(embed=embed)
    else:
        await ctx.send("Sorry, you do not have enough money to purchase this item.")
@bot.command()
@commands.cooldown(1, 60, commands.BucketType.user)
async def sellweed(ctx):
    """Collect the farm's cash-per-cycle payout (once per minute).

    Fix: the payout was added to the per-user dict itself instead of the
    nested balance slot ``usercashjson[id][id]`` (see $start), which raised
    ``TypeError`` on every use.
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    usercashjson[author][author] += itemsjson[author][0]['cpc']
    await ctx.send(f"Wow Boss, you collected ${itemsjson[author][0]['cpc']} from your weed business!")
    with open('usercash.json', 'w') as f:
        json.dump(usercashjson, f, indent=4)
@sellweed.error
async def clear_error(ctx, error):
    # Cooldown notice for $sellweed (1 use per 60s per user).
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, let your weed grow! Collect cash from your weed farm every 60 seconds!!")
@bot.command()
async def weedfarm_help(ctx):
    """Send the weed-farm command reference as a plain code-block message."""
    # NOTE(review): the list mentions $collectcashweedfarm but the actual
    # collection command is $sellweed — confirm which name is intended.
    await ctx.send("""
    ``` Commands:
    $weedfarmstats - Provides the statistics of your weed farm
    $buyweedfarmworker - Purchases a worker for your weed farm
    $collectcashweedfarm - Collects the cash from your weed farm 
    $weedfarm_help
    ```""")
@bot.command()
async def help(ctx):
    """Send the two-page custom command list (replaces the default help)."""
    embed = discord.Embed(title="Command List", description=f"**Version: {Version}**", color=0x00ff00)
    embed.add_field(name="Commands", value="""
    $work - allocates you to work for a set amount of cash twice per minute
    $mine - with a `pickaxe` you can mine rubies which can then be sold for $$$
    $shop - displays the shop
    $updates - displays the update log
    $crime - commits a ***legal*** activity
    $gamble (amount) - Gambles a set amount of cash, 50/50 chance to fill your pockets; or leak them.
    $rob @person - Robs someone, easy cash, easy guap.
    $buy(item) - Purchases an item, no capitals, no spaces.
    $give (amount) @person - Gives cash to whoever you ping!
    $prestige - Shows your current prestige
    $beg - Asks a random person for money
    $inventory - Shows all your current items, (DOES NOT SHOW ORES)
    $sellrubies - Sells your rubies
    $forgerecipes - Shows all the forging Recipes
    $forge (pickaxe name) - Forges a pickaxe
    """)
    embed.set_footer(text=f"Version: {Version}", icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
    embed.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
    await ctx.send(embed=embed)
    # Page two: business (weed farm / meth lab) commands.
    embed1 = discord.Embed(title="Command List Page two", description=f"**Version: {Version}**", color=0x00ff00)
    embed1.add_field(name="Command Page 2", value="""
    $upgradepickaxe - Upgrades your current pickaxe
    $weedfarmstats - Provides the statistics of your `Weed Farm`
    $buyweedfarmworker - Purchases a worker for your `Weed Farm`
    $collectcashweedfarm - Collects the cash from your `Weed Farm`
    $weedfarm_help - Displays all the commands for the `Weed Farm`
    $buymethlab - Purchases a `Methlab`
    $collectmeth - Collects meth from your trailers
    $sellmeth (amount)- Sells all your `Meth baggies`
    $buymethlabtrailer (amount) - Purchases a `Methlab Trailer`
    $methlabstats - Displays the statis of your `Methlab`
    """)
    embed1.set_footer(text=f"Version: {Version}", icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
    embed1.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
    await ctx.send(embed=embed1)
@bot.command()
async def updates(ctx):
    """Show the latest changelog embed.

    Fix: Discord rejects embed fields with an empty value (HTTP 400 on
    send), so the placeholder "Added" field now carries a non-empty value.
    """
    embed = discord.Embed(title="Update 2.6.1", description=f"**Version: v.{Version}**", color=0x00ff00)
    embed.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
    embed.set_footer(text=f"Version: {Version}",
                     icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
    embed.add_field(name="**Added**", value="Nothing new yet.", inline=False)
    await ctx.send(embed=embed)
@commands.cooldown(6, 60, commands.BucketType.user)
@bot.command()
async def crime(ctx):
    """Commit a random crime; the outcome depends on a random 2-digit roll.

    Fixes: the shootout branch indexed the balance dict with an *int* key
    (``usercashjson[ctx.author.id]`` → KeyError, JSON keys are strings) and
    dumped JSON to ``f`` — a file handle that was opened read-only and had
    already been closed by its ``with`` block.  All balance updates now use
    str keys and explicit ``open(..., 'w')`` writes.
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    chances = [1, 2]
    arrestrate = ''.join(random.choice(string.digits) for i in range(2))
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    roll = int(arrestrate)
    if roll >= 90:
        if itemsjson[author][0]["gun"] >= 1:
            await ctx.send("You decide to bring your 9mm out! You're in a live shootout with the police!")
            win = random.choice(chances)
            if win == 2:
                await ctx.send("You won! You shot his leg then bit his ear! Ran away with $3000")
                usercashjson[author][author] += 3000
            else:
                await ctx.send("You lost:( He shot your ear then bit your leg, those hospital fees gon be expensive, -$5000")
                usercashjson[author][author] -= 5000
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
        else:
            await ctx.send("**You have been arrested, lost $10000**")
            usercashjson[author][author] -= 10000
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
    elif roll >= 80:
        await ctx.send("You robbed a casino Stole $2450!")
        usercashjson[author][author] += 2450
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    elif roll >= 70:
        await ctx.send("You robbed a train! Stole $1500")
        usercashjson[author][author] += 1500
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    elif roll >= 60:
        await ctx.send("You robbed a bar! Stole $1000")
        usercashjson[author][author] += 1000
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    elif roll >= 50:
        # Grandma's house pays out in rubies instead of cash.
        await ctx.send("You robbed your Grandmas house! Stole all her rubies!")
        itemsjson[author][0]["rubies"] += 5
        with open('items.json', 'w') as f:
            json.dump(itemsjson, f, indent=4)
    elif roll >= 40:
        await ctx.send("You robbed Jess's house! Stole her vibra-, $34")
        usercashjson[author][author] += 34
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    elif roll >= 30:
        await ctx.send("You robbed WRLD's! Stole his Supreme Water Bottle, $100")
        usercashjson[author][author] += 100
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    elif roll >= 20:
        await ctx.send("You robbed WRLD's! Stole his Supreme Water Bottle, $100")
        usercashjson[author][author] += 100
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    elif roll >= 10:
        await ctx.send("You robbed Tristian's trailor! He had no money:( ")
    elif roll >= 0:
        await ctx.send("You robbed my house?! You lost $1250!!!")
        usercashjson[author][author] -= 1250
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
@crime.error
async def clear_error(ctx, error):
    # Cooldown notice for $crime (6 uses per 60s per user).
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, buddy, stop stealing, you're on cooldown!")
@commands.cooldown(1, 60, commands.BucketType.user)
@bot.command()
async def rob(ctx, member: discord.Member):
    """Steal a random amount (up to $9999, capped at the victim's balance).

    Fix: the original resampled a random 4-digit string until it happened to
    fall at or below the victim's balance — effectively an infinite loop when
    the target had little or no cash.  The amount is now drawn directly from
    [0, min(9999, balance)].
    """
    robee = str(member.id)
    author = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    # max(0, ...) guards against a victim whose balance has gone negative.
    cashstolen = random.randint(0, max(0, min(9999, usercashjson[robee][robee])))
    usercashjson[robee][robee] -= cashstolen
    usercashjson[author][author] += cashstolen
    await ctx.send(f"**You stole ${cashstolen}, stash it in the warehouse Bain!**")
    with open('usercash.json', 'w') as f:
        json.dump(usercashjson, f, indent=4)
@rob.error
async def error_clear(ctx, error):
    # Cooldown notice for $rob.
    # NOTE(review): the message says "120 Seconds" but the cooldown decorator
    # on $rob is 60 seconds — confirm which number is intended.
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Stop robbing people! You're on cooldown for 120 Seconds!!11!")
@bot.command()
async def buymethlab(ctx):
    """Buy a meth lab for $100,000 (one per user).

    Fix: the original told repeat buyers "you already have a methlab" but
    then fell through and charged them / incremented the count anyway;
    duplicates are now rejected before any state changes.
    """
    author = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    if usercashjson[author][author] >= 100000:
        with open('items.json', 'r') as f:
            itemsjson = json.load(f)
        if itemsjson[author][0]["methlab"] >= 1:
            await ctx.send("Sorry, you already have a `methlab`!")
            return
        itemsjson[author][0]['methlab'] += 1
        usercashjson[author][author] -= 100000
        await ctx.send("You just purchased your very first methlab! You can see all the commands with $methlab_help!")
        with open('items.json', 'w') as f:
            json.dump(itemsjson, f, indent=4)
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    else:
        await ctx.send("Sorry! You need $100000 to purchase the `meth lab`")
@bot.command()
async def buymethlabtrailer(ctx, amounttobuy: int):
    """Buy *amounttobuy* meth trailers at $2,500 each (requires a meth lab).

    Fixes: negative/zero amounts are now rejected (a negative amount used to
    "sell back" trailers and mint money), and the meth-per-cycle figure is
    computed directly instead of via a counting loop.
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    if amounttobuy < 1:
        await ctx.send("You need to buy at least one trailer!")
        return
    if usercashjson[author][author] >= amounttobuy * 2500:
        if itemsjson[author][0]["methlab"] == 1:
            await ctx.send(f"Purchased: {amounttobuy} Trailer/s")
            itemsjson[author][0]["methtrailers"] += amounttobuy
            usercashjson[author][author] -= 2500 * amounttobuy
            # One meth bag per trailer per collection cycle (see $collectmeth).
            mpc = itemsjson[author][0]["methtrailers"]
            embed = discord.Embed(title="Meth Lab Statistics", description=f"**Version: {Version}**", color=discord.Color.green())
            embed.add_field(name=f"Trailer Statistics", value=f"Trailer Amount: {itemsjson[author][0]['methtrailers']}")
            embed.add_field(name=f"MBs Per Cycle Statistics", value=f"Producing: {mpc} MPC")
            await ctx.send(embed=embed)
            with open('items.json', 'w') as f:
                json.dump(itemsjson, f, indent=4)
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
        else:
            await ctx.send("You don't have a `methlab`, buy one with $buymethlab!")
    else:
        await ctx.send("Sorry! You do not have enough money to purchase this! $2500")
@buymethlabtrailer.error
async def clear_error(ctx, error):
    # Cooldown notice for $buymethlabtrailer.
    # NOTE(review): $buymethlabtrailer carries no cooldown decorator, so this
    # branch looks unreachable — confirm whether a cooldown was removed.
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, the trailer companies are getting sus of you! Slowdown!")
@bot.command()
async def methlab_help(ctx):
    """Send the meth-lab command reference as a plain code-block message."""
    await ctx.send("""
    ```Commands:
    $buymethlabtrailer
    $buymethlab
    $collectmeth
    $sellmeth
    $methlab_help
    $methlabstats
    ```
    """)
@bot.command()
@commands.cooldown(3, 60, commands.BucketType.user)
async def methlabstats(ctx):
    """Show the invoker's meth-lab trailer / meth-per-cycle stats."""
    author = str(ctx.author.id)
    mpc = 0  # NOTE(review): unused — the embed reads methtrailers directly.
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)  # NOTE(review): loaded but unused here.
    # Truthiness check: methlab count of 0 (no lab) falls to the else branch.
    if itemsjson[author][0]["methlab"]:
        embed = discord.Embed(title="Meth Lab Statistics", description=f"**Version: {Version}**",
                              color=discord.Color.green())
        embed.add_field(name=f"Trailer Statistics", value=f"Trailer Amount: {itemsjson[author][0]['methtrailers']}")
        embed.add_field(name=f"MBs Per Cycle Statistics", value=f"Producing: {itemsjson[author][0]['methtrailers']} MPC")
        await ctx.send(embed=embed)
    else:
        await ctx.send("You do not own a `methlab`, buy one with $buymethlab!")
@bot.command()
@commands.cooldown(1, 60, commands.BucketType.user)
async def collectmeth(ctx):
    """Collect one meth bag per trailer (once per minute).

    Fix: the original also read and rewrote usercash.json without changing
    it; that redundant (and race-prone) I/O is gone.
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    itemsjson[author][0]["methbags"] += itemsjson[author][0]["methtrailers"]
    with open('items.json', 'w') as f:
        json.dump(itemsjson, f, indent=4)
    await ctx.send(f"Collected: {itemsjson[author][0]['methtrailers']} Methbags")
@bot.command()
async def sellmeth(ctx, amounttosell: int):
    """Sell *amounttosell* meth bags at $1,000 each.

    Fix: the original only checked that the seller owned *at least one* bag,
    so selling more bags than owned (or a negative amount) drove the
    inventory negative and minted money; the amount is now validated against
    the actual bag count.
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    if itemsjson[author][0]["methlab"]:
        if 1 <= amounttosell <= itemsjson[author][0]["methbags"]:
            itemsjson[author][0]['methbags'] -= amounttosell
            usercashjson[author][author] += 1000 * amounttosell
            with open('items.json', 'w') as f:
                json.dump(itemsjson, f, indent=4)
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
            await ctx.send(f"Sold: {amounttosell} Methbags for ${amounttosell * 1000}")
        else:
            await ctx.send("You don't have any `methbags`! Go collect them with $collectmeth")
    else:
        await ctx.send("You do not own a `methlab`, buy one with $buymethlab!")
@sellmeth.error
async def clear_error(ctx, error):
    # Cooldown notice for $sellmeth.
    # NOTE(review): $sellmeth carries no cooldown decorator — confirm intent.
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, no way you have ***that*** much meth, you're on cooldown!")
@collectmeth.error
async def clear_error(ctx, error):
    # Cooldown notice for $collectmeth (1 use per 60s per user).
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, no way you have ***that*** much meth, you're on cooldown!")
@bot.command()
@commands.cooldown(1, 20, commands.BucketType.user)
async def beg(ctx):
    """Hand the invoker a random 3-digit amount (once per 20s).

    Fix: the cooldown was declared without a BucketType, which makes it a
    single shared (default/global) bucket — one user's beg locked everyone
    out; it is now per-user like every other cooldown in this file.
    """
    author = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    begcash = ''.join(random.choice(string.digits) for i in range(3))
    # Balances live in the nested slot usercashjson[id][id] (see $start).
    usercashjson[author][author] += int(begcash)
    await ctx.send(f"Fineee :rolling_eyes:, ig ill give u cash ${begcash}")
    with open('usercash.json', 'w') as f:
        json.dump(usercashjson, f, indent=4)
@beg.error
async def clear_error(ctx, error):
    # Cooldown notice for $beg (1 use per 20s).
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa poor begger, you're on cooldown! ")
@commands.cooldown(2, 60)
@bot.command()
async def scam(ctx, member: discord.Member):
    """Steal 1–9 rubies or meth bags from *member*.

    Fixes: the stolen amount was always capped against the victim's RUBY
    count even when methbags were chosen (letting methbags go negative); the
    "0 becomes 1" fallback stole from empty inventories (negative balances);
    and the resample-until-affordable digit loop is replaced with a direct
    bounded draw.
    """
    author = str(ctx.author.id)
    victim = str(member.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    chosenoption = random.choice(["rubies", "methbags"])
    available = itemsjson[victim][0][chosenoption]
    if available < 1:
        # Nothing to take — report an empty haul instead of going negative.
        await ctx.send(f'You stole 0 {chosenoption} from {member.name}')
        return
    amountstolen = random.randint(1, min(9, available))
    itemsjson[victim][0][chosenoption] -= amountstolen
    itemsjson[author][0][chosenoption] += amountstolen
    await ctx.send(f'You stole {amountstolen} {chosenoption} from {member.name}')
    with open('items.json', 'w') as f:
        json.dump(itemsjson, f, indent=4)
@scam.error
async def clear_error(ctx, error):
    # Cooldown notice for $scam (2 uses per 60s).
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.send("Whoa, you're on cooldown! Please stop scamming for a bit")
@bot.command()
async def gamble(ctx):
    """50/50 double-or-nothing on the amount after ``$gamble ``.

    Fixes: a non-numeric bet crashed the handler (`int()` ValueError), and a
    negative "bet" let users *win* money on a loss; both are now rejected
    before any balance change.
    """
    author = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    # The bet is everything after "$gamble " in the raw message.
    try:
        betamount = int(ctx.message.content[8:])
    except ValueError:
        await ctx.send("That's not a number! Try $gamble 100")
        return
    if betamount < 1:
        await ctx.send("You have to bet at least $1!")
        return
    if usercashjson[author][author] < betamount:
        await ctx.send("Sorry! You don't have that much cash! *bum*")
    else:
        chance = random.choice([1, 2])
        await ctx.send("Generating chance. . .")
        await asyncio.sleep(1)
        if chance == 2:
            usercashjson[author][author] += betamount
            await ctx.send("You won! Nice on ya lad!")
        else:
            usercashjson[author][author] -= betamount
            await ctx.send("You lost! Wanna rob the Casino and get your money back? Do $heist casino!")
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
@bot.command()
async def upgradepickaxe(ctx):
    """Upgrade the invoker's pickaxe one tier (Rusty→Steel $15k, Steel→Golden $17.5k).

    Fix: the two tier checks were independent ``if`` statements, so a rich
    Rusty owner was upgraded TWICE in a single call (paying $32,500 straight
    to Golden); the second check is now ``elif``.  The unused ``txt`` channel
    lookup was removed.
    """
    rustypickaxe = discord.utils.get(ctx.author.guild.roles, name="Rusty Pickaxe")
    steelpickaxe = discord.utils.get(ctx.author.guild.roles, name="Steel Pickaxe")
    goldenpickaxe = discord.utils.get(ctx.author.guild.roles, name="Golden Pickaxe")
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    author = str(ctx.author.id)
    if rustypickaxe in ctx.author.roles:
        if usercashjson[author][author] >= 15000:
            await ctx.author.add_roles(steelpickaxe)
            await ctx.author.remove_roles(rustypickaxe)
            await ctx.send("Upgraded pickaxe to `Steel Pickaxe`!")
            usercashjson[author][author] -= 15000
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
    elif steelpickaxe in ctx.author.roles:
        if usercashjson[author][author] >= 17500:
            await ctx.author.add_roles(goldenpickaxe)
            await ctx.author.remove_roles(steelpickaxe)
            await ctx.send("Upgraded pickaxe to `Golden Pickaxe`!")
            usercashjson[author][author] -= 17500
            with open('usercash.json', 'w') as f:
                json.dump(usercashjson, f, indent=4)
    # Beyond Golden, upgrades come from $forge with mined minerals.
    await ctx.send("Too upgrade your tools from here, you need to collect minerals only obtainable through mining, crafting recipes will be available if you do $forgelist.")
@bot.command()
async def forgerecipes(ctx):
    """Show the mineral costs for forgeable pickaxes (see $forge)."""
    pickaxerecips = """
    Magmarite Pickaxe - 15 Magmarite, 9 Sapphire
    Hellian Pickaxe - 16 Magmarite, 13 Sapphire, 4 Hellian
    """
    embed = discord.Embed(title="Forging Recipes", description=f"**Version: {Version}**", color=0x00ff00)
    embed.add_field(name=f"Pickaxe Recipes", value=f"{pickaxerecips}")
    embed.set_footer(text=f"Version: {Version}",
                     icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
    embed.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
    await ctx.send(embed=embed)
@bot.command()
async def give(ctx, amounttogive: int, member: discord.Member):
    """Transfer *amounttogive* dollars from the invoker to *member*.

    Fixes: the credit targeted the recipient's dict under the SENDER's id
    (``usercashjson[member][author]``), so the money vanished or raised
    KeyError; negative amounts could drain the recipient; and the failure
    message was never awaited.
    """
    author = str(ctx.author.id)
    target = str(member.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    if amounttogive >= 1 and usercashjson[author][author] >= amounttogive:
        usercashjson[author][author] -= amounttogive
        usercashjson[target][target] += amounttogive
        embed = discord.Embed(title="Give Cash", description=f"**Version: {Version}**", color=0x00ff00)
        embed.add_field(name=f"Cash Given", value=f"${amounttogive}")
        embed.set_footer(text=f"Version: {Version}",
                         icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
        embed.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
        await ctx.send(embed=embed)
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
    else:
        await ctx.send("You don't have enough cash to send to them!")
@bot.command()
async def forge(ctx, pickaxe: str):
    """Forge a higher-tier pickaxe from mined minerals (see $forgerecipes).

    Accepts "magmaritepickaxe" (needs a Golden Pickaxe, 15 magmarite,
    9 sapphire) or "hellianpickaxe" (needs a Magmarite Pickaxe,
    16 magmarite, 13 sapphire, 4 hellian).  The previous pickaxe role is
    consumed.  Note: "saphire" below is the literal key used in
    mineraldata.json — do not "fix" the spelling without migrating the data.
    """
    print(pickaxe)  # NOTE(review): debug leftover
    if pickaxe == "magmaritepickaxe":
        author = str(ctx.author.id)
        with open("mineraldata.json", 'r') as f:
            mineralsjson = json.load(f)
        if mineralsjson[author][0]["magmarite"] >= 15:
            if mineralsjson[author][0]["saphire"] >= 9:
                goldenpickaxe = discord.utils.get(ctx.author.guild.roles, name="Golden Pickaxe")
                if goldenpickaxe in ctx.author.roles:
                    await ctx.send("You forged a `Magmarite Pickaxe`!")
                    role = discord.utils.get(ctx.author.guild.roles, name="Magmarite Pickaxe")
                    await ctx.author.add_roles(role)
                    # Consume the recipe minerals and the prerequisite pickaxe.
                    mineralsjson[author][0]["magmarite"] -= 15
                    mineralsjson[author][0]["saphire"] -= 9
                    await ctx.author.remove_roles(goldenpickaxe)
                    with open('mineraldata.json', 'w') as f:
                        json.dump(mineralsjson, f, indent=4)
                else:
                    await ctx.send("You need a `Golden Pickaxe` to forge the `Magmarite Pickaxe`!")
            else:
                await ctx.send("You don't have enough `Sapphire!`, do $mine with a Pickaxe to collect some!")
        else:
            await ctx.send("You don't have enough `Magmarite!`, do $mine with a Pickaxe to collect some!")
    if pickaxe == "hellianpickaxe":
        author = str(ctx.author.id)
        with open("mineraldata.json", 'r') as f:
            mineralsjson = json.load(f)
        if mineralsjson[author][0]["magmarite"] >= 16:
            if mineralsjson[author][0]["saphire"] >= 13:
                if mineralsjson[author][0]["hellian"] >= 4:
                    magmarpick = discord.utils.get(ctx.author.guild.roles, name="Magmarite Pickaxe")
                    if magmarpick in ctx.author.roles:
                        await ctx.send("You forged a `Hellian Pickaxe`!")
                        role = discord.utils.get(ctx.author.guild.roles, name="Hellian Pickaxe")
                        await ctx.author.add_roles(role)
                        # Consume the recipe minerals and the prerequisite pickaxe.
                        mineralsjson[author][0]["magmarite"] -= 16
                        mineralsjson[author][0]["saphire"] -= 13
                        mineralsjson[author][0]['hellian'] -= 4
                        await ctx.author.remove_roles(magmarpick)
                        with open('mineraldata.json', 'w') as f:
                            json.dump(mineralsjson, f, indent=4)
                    else:
                        await ctx.send("You need a `Magmarite Pickaxe` to forge the `Hellian Pickaxe`!")
                else:
                    await ctx.send("You don't have enough `Hellian!`, do $mine with a Pickaxe to collect some!")
            else:
                await ctx.send("You don't have enough `Sapphire!`, do $mine with a Pickaxe to collect some!")
        else:
            await ctx.send("You don't have enough `Magmarite!`, do $mine with a Pickaxe to collect some!")
@bot.command()
async def buykingmonke(ctx):
    """Buy the King Monke role for $3,000,000.

    Fix: the role was granted without ever deducting the $3,000,000 shop
    price; the cost is now charged and persisted before the role is added.
    """
    author = str(ctx.author.id)
    with open('usercash.json', 'r') as f:
        usercashjson = json.load(f)
    role = discord.utils.get(ctx.author.guild.roles, name="👑King Monk👑")
    if usercashjson[author][author] >= 3000000:
        usercashjson[author][author] -= 3000000
        with open('usercash.json', 'w') as f:
            json.dump(usercashjson, f, indent=4)
        await ctx.author.add_roles(role)
    else:
        await ctx.send("You cannot afford King Monke")
@bot.command()
async def bananaphone(ctx):
    """Spam 100 banana emojis — a perk reserved for King Monke holders."""
    king_role = discord.utils.get(ctx.author.guild.roles, name="👑King Monk👑")
    # Silently do nothing for non-holders, matching the original behavior.
    if king_role not in ctx.author.roles:
        return
    for _ in range(100):
        await ctx.send(":banana:")
def verifycheck(message):
    """``bot.wait_for`` predicate: accept only a message matching the current captcha.

    Fix: the original returned the ``TypeError`` *class* (which is truthy)
    on a mismatch, so ANY message satisfied the wait_for check — and $verify
    then always reported failure.  It now returns a plain boolean.
    """
    global captcha
    return message.content == captcha
@bot.command()
async def verify(ctx):
    """Send a 6-letter captcha and grant the `verified` role on a correct reply.

    Fix: ``if TypeError:`` is always truthy, so the original sent "failed"
    on every attempt even when wait_for had matched the captcha; success now
    simply falls through to granting the role.
    """
    global captcha
    captcha = ''.join(random.choice(string.ascii_letters) for i in range(6))
    role = discord.utils.get(ctx.author.guild.roles, name='verified')
    embed = discord.Embed(title="Captcha Verification", description="*Please complete this captcha to get access to the server*", color=discord.Color.purple())
    embed.add_field(name="Captcha Provided", value=f"{captcha}")
    await ctx.send(embed=embed)
    try:
        # verifycheck only accepts the exact captcha, so reaching `else`
        # means the user answered correctly.  NOTE(review): 5 seconds is
        # tight for retyping six characters — consider raising the timeout.
        await bot.wait_for('message', check=verifycheck, timeout=5)
    except asyncio.TimeoutError:
        await ctx.send("**The verification token has expired! Please send $verify to start a new one.**")
    else:
        await ctx.author.add_roles(role)
@bot.command()
async def inventory(ctx):
    """Show the invoker's item counts in an embed (ores are not included).

    Fix: the embed title said "Prestige" — a copy-paste from the $prestige
    command; it now says "Inventory".
    """
    author = str(ctx.author.id)
    with open('items.json', 'r') as f:
        itemsjson = json.load(f)
    embed = discord.Embed(title="Inventory", description=f"**Version: {Version}**", color=0x00ff00)
    embed.add_field(name="Inventory Items ", value=f"""
    Rubies: {itemsjson[author][0]["rubies"]}
    Weed Farms: {itemsjson[author][0]["weedfarm"]}
    Meth Labs: {itemsjson[author][0]["methlab"]}
    Meth baggies: {itemsjson[author][0]["methbags"]}
    Meth Trailers: {itemsjson[author][0]["methtrailers"]}
    """)
    embed.set_footer(text=f"Version: {Version}", icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
    embed.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
    await ctx.send(embed=embed)
@bot.command()
async def prestige(ctx):
    """Show the invoker's prestige value from presteige.json."""
    # NOTE(review): assumes the user already has an entry in presteige.json;
    # nothing in this file writes that entry — confirm where it is created.
    with open("presteige.json", 'r') as f:
        prestigejson = json.load(f)
    embed = discord.Embed(title="Prestige", description=f"**Version: {Version}**", color=0x00ff00)
    embed.add_field(name="Prestige", value=f"{prestigejson[str(ctx.author.id)]}")
    embed.set_footer(text=f"Version: {Version}", icon_url="https://cdn.discordapp.com/emojis/754736642761424986.png")
    embed.set_author(name=f"Requested by: {ctx.author.name}", icon_url=ctx.author.avatar_url)
    await ctx.send(embed=embed)
@bot.command()
async def start(ctx):
    """Register the invoker: create their mineral, cash, and item records.

    Fixes: re-running $start used to silently wipe an existing player's
    balance and minerals (now guarded), and items.json was never seeded even
    though every economy command indexes ``items.json[id][0]`` (now
    initialized with the keys those commands read: rubies, gun, weedfarm,
    cpc, methlab, methtrailers, methbags).
    """
    author = str(ctx.author.id)
    with open("mineraldata.json", 'r') as f:
        mineraljson = json.load(f)
    with open("usercash.json", 'r') as f:
        usercashjson = json.load(f)
    with open("items.json", 'r') as f:
        itemsjson = json.load(f)
    if author in usercashjson:
        await ctx.send("You already have an account!")
        return
    mineraljson[author] = [
        {
            "saphire": 0,
            "magmarite": 0,
            "alumanite": 0,
            "hellian": 0,
            "ECOi": 0,
            "fishre": 0
        }
    ]
    usercashjson[author] = {author: 0}
    itemsjson[author] = [
        {
            "rubies": 0,
            "gun": 0,
            "weedfarm": 0,
            "cpc": 0,
            "methlab": 0,
            "methtrailers": 0,
            "methbags": 0
        }
    ]
    with open('mineraldata.json', 'w') as f:
        json.dump(mineraljson, f, indent=4)
    with open('usercash.json', 'w') as f:
        json.dump(usercashjson, f, indent=4)
    with open('items.json', 'w') as f:
        json.dump(itemsjson, f, indent=4)
# NOTE: placeholder token — load the real token from an environment variable
# or a config file instead of committing it to the repository.
bot.run('urbottokenhere')
| [
"noreply@github.com"
] | noreply@github.com |
b79a9b710f88b92e919b4b75f4e4d0094a5287ed | c7b31209cc7b5a015ca34d1174e7978730ce6733 | /rpplugins/env_probes/environment_capture_stage.py | 9bbe136a0b8d893af6e96f81148c9d987fbae7be | [
"MIT"
] | permissive | gitter-badger/RenderPipeline | c244343def6dd33e55e78cd828f0c703b338ce1a | 4d4bf4164c8dcb188f93e46749ba52de8f61b37f | refs/heads/master | 2021-01-22T00:52:25.396315 | 2016-04-16T13:13:57 | 2016-04-16T13:15:27 | 56,395,593 | 0 | 0 | null | 2016-04-16T17:04:37 | 2016-04-16T17:04:37 | null | UTF-8 | Python | false | false | 7,877 | py | """
RenderPipeline
Copyright (c) 2014-2016 tobspr <tobias.springer1@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import division
from rplibs.six.moves import range
from rplibs.six import itervalues
from panda3d.core import Camera, PerspectiveLens, Vec4, Vec3, PTAInt, GraphicsOutput
from rpcore.globals import Globals
from rpcore.image import Image
from rpcore.render_stage import RenderStage
class EnvironmentCaptureStage(RenderStage):
    """ This stage renders the scene to a cubemap.

    The scene is rendered six times (one display region per cube face) into
    a single wide FBO of size (resolution * 6, resolution), then copied into
    the persistent cubemap storage and filtered into a mipmapped specular
    chain and a low-resolution diffuse cubemap.
    """

    required_inputs = ["DefaultEnvmap", "AllLightsData", "maxLightIndex"]
    required_pipes = []

    def __init__(self, pipeline):
        RenderStage.__init__(self, pipeline)
        # Per-face resolution of the specular capture and of the (tiny) diffuse map.
        self.resolution = 128
        self.diffuse_resolution = 4
        self.regions = []
        self.cameras = []
        # Rig the six capture cameras are parented to; moved per probe in set_probe().
        self.rig_node = Globals.render.attach_new_node("EnvmapCamRig")
        # Index of the probe currently being captured, shared with the shaders.
        self.pta_index = PTAInt.empty_array(1)
        # Storage textures are assigned externally before create() is called.
        self.storage_tex = None
        self.storage_tex_diffuse = None

    def create(self):
        """Create the capture FBO and all copy/filter targets."""
        self.target = self.create_target("CaptureScene")
        # All six faces side by side in one wide buffer.
        self.target.size = self.resolution * 6, self.resolution
        self.target.add_depth_attachment(bits=16)
        self.target.add_color_attachment(bits=16, alpha=True)
        self.target.prepare_render(None)
        # Remove all unused display regions; we manage our own six regions.
        internal_buffer = self.target.internal_buffer
        internal_buffer.remove_all_display_regions()
        internal_buffer.disable_clears()
        internal_buffer.get_overlay_display_region().disable_clears()
        self._setup_camera_rig()
        self._create_store_targets()
        self._create_filter_targets()

    def _setup_camera_rig(self):
        """ Setups the cameras to render a cubemap """
        directions = (Vec3(1, 0, 0), Vec3(-1, 0, 0), Vec3(0, 1, 0),
                      Vec3(0, -1, 0), Vec3(0, 0, 1), Vec3(0, 0, -1))
        # Prepare the display regions, one horizontal slice per cube face
        for i in range(6):
            region = self.target.internal_buffer.make_display_region(
                i / 6, i / 6 + 1 / 6, 0, 1)
            region.set_sort(25 + i)
            region.set_active(True)
            region.disable_clears()
            # Set the correct clears
            region.set_clear_depth_active(True)
            region.set_clear_depth(1.0)
            region.set_clear_color_active(True)
            region.set_clear_color(Vec4(0))
            lens = PerspectiveLens()
            # 90 degree FOV so the six faces exactly tile the sphere.
            lens.set_fov(90)
            lens.set_near_far(0.001, 1.0)
            camera = Camera("EnvmapCam-" + str(i), lens)
            camera_np = self.rig_node.attach_new_node(camera)
            camera_np.look_at(camera_np, directions[i])
            region.set_camera(camera_np)
            self.regions.append(region)
            self.cameras.append(camera_np)
        # Roll corrections so the face orientations match cubemap conventions.
        self.cameras[0].set_r(90)
        self.cameras[1].set_r(-90)
        self.cameras[3].set_r(180)
        self.cameras[5].set_r(180)
        # Register cameras so they use the envmap render tag state
        for camera_np in self.cameras:
            self._pipeline.tag_mgr.register_envmap_camera(camera_np.node())

    def _create_store_targets(self):
        """ Creates the targets which copy the result texture into the actual storage """
        # Copies the captured faces into the persistent specular cubemap array.
        self.target_store = self.create_target("StoreCubemap")
        self.target_store.size = self.resolution * 6, self.resolution
        self.target_store.prepare_buffer()
        self.target_store.set_shader_input("SourceTex", self.target.color_tex)
        self.target_store.set_shader_input("DestTex", self.storage_tex)
        self.target_store.set_shader_input("currentIndex", self.pta_index)
        # Intermediate full-res cubemap later downsampled to the diffuse storage.
        self.temporary_diffuse_map = Image.create_cube("DiffuseTemp", self.resolution, "RGBA16")
        self.target_store_diff = self.create_target("StoreCubemapDiffuse")
        self.target_store_diff.size = self.resolution * 6, self.resolution
        self.target_store_diff.prepare_buffer()
        self.target_store_diff.set_shader_input("SourceTex", self.target.color_tex)
        self.target_store_diff.set_shader_input("DestTex", self.temporary_diffuse_map)
        self.target_store_diff.set_shader_input("currentIndex", self.pta_index)

    def _create_filter_targets(self):
        """ Generates the targets which filter the specular cubemap """
        # One filter pass per mip level, halving the size each step down to 1px.
        self.filter_targets = []
        mip = 0
        size = self.resolution
        while size > 1:
            size = size // 2
            mip += 1
            target = self.create_target("FilterCubemap:{0}-{1}x{1}".format(mip, size))
            target.size = size * 6, size
            target.prepare_buffer()
            target.set_shader_input("currentIndex", self.pta_index)
            target.set_shader_input("currentMip", mip)
            target.set_shader_input("SourceTex", self.storage_tex)
            # Write into the specific mip level of the same storage texture.
            target.set_shader_input("DestTex", self.storage_tex, False, True, -1, mip, 0)
            self.filter_targets.append(target)
        # Target to filter the diffuse cubemap
        self.filter_diffuse_target = self.create_target("FilterCubemapDiffuse")
        self.filter_diffuse_target.size = self.diffuse_resolution * 6, self.diffuse_resolution
        self.filter_diffuse_target.prepare_buffer()
        self.filter_diffuse_target.set_shader_input("SourceTex", self.temporary_diffuse_map)
        self.filter_diffuse_target.set_shader_input("DestTex", self.storage_tex_diffuse)
        self.filter_diffuse_target.set_shader_input("currentIndex", self.pta_index)

    def set_probe(self, probe):
        """Position the camera rig at the given probe and select its storage slot."""
        self.rig_node.set_mat(probe.matrix)
        self.pta_index[0] = probe.index

    def update(self):
        """Enable only the targets scheduled for this frame by the task scheduler."""
        # First, disable all targets
        for target in itervalues(self._targets):
            target.active = False
        # Check for updated faces
        for i in range(6):
            if self._pipeline.task_scheduler.is_scheduled("envprobes_capture_envmap_face" + str(i)):
                self.regions[i].set_active(True)
        # Check for filtering
        if self._pipeline.task_scheduler.is_scheduled("envprobes_filter_and_store_envmap"):
            self.target_store.active = True
            self.target_store_diff.active = True
            self.filter_diffuse_target.active = True
            for target in self.filter_targets:
                target.active = True

    def set_shader_input(self, *args):
        # Capture renders the real scene, so inputs go on the global scene root.
        Globals.render.set_shader_input(*args)

    def reload_shaders(self):
        """(Re)load the copy and filter shaders; one pre-generated shader per mip."""
        self.target_store.shader = self.load_plugin_shader(
            "store_cubemap.frag.glsl")
        self.target_store_diff.shader = self.load_plugin_shader(
            "store_cubemap_diffuse.frag.glsl")
        self.filter_diffuse_target.shader = self.load_plugin_shader(
            "filter_cubemap_diffuse.frag.glsl")
        for i, target in enumerate(self.filter_targets):
            target.shader = self.load_plugin_shader("mips/{}.autogen.glsl".format(i))
| [
"tobias.springer1@googlemail.com"
] | tobias.springer1@googlemail.com |
5d10a9d692575a05db729af626128d544ad5b613 | 13141a522f31660b2b90d949b56eb02acb5580b7 | /动态规划/064 Minimum Path Sum.py | 24b9e2be3bf06b278a323bd0ea7618c6e087121b | [] | no_license | Busc/LeetCode-learning | a2341fd00d1e4fb4e41d701f1c6385e88f6a8539 | 4a32734908b2efe0577e11807981e0c37b0dd8d4 | refs/heads/master | 2020-12-02T07:48:45.817641 | 2018-04-03T08:24:48 | 2018-04-03T08:24:48 | 96,728,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 982 | py | '''
Array -- Medium
Given a m x n grid filled with non-negative numbers, find a path from top left to bottom right which minimizes the sum of all numbers along its path.
Note: You can only move either down or right at any point in time.
'''
class Solution(object):
    def minPathSum(self, grid):
        """Return the minimal sum along a top-left to bottom-right path.

        Only moves to the right or down are allowed.

        :param grid: List[List[int]] of non-negative numbers
        :return: int -- the minimum path sum (0 for an empty grid)
        """
        # Guard against an empty grid / empty rows, which would otherwise
        # raise an IndexError on grid[0][0].
        if not grid or not grid[0]:
            return 0
        rows = len(grid)
        cols = len(grid[0])
        # One-dimensional DP: dp[j] is the minimal cost to reach column j
        # of the row currently being processed.
        dp = [0 for __ in range(cols)]
        dp[0] = grid[0][0]
        # First row: can only arrive from the left.
        for j in range(1, cols):
            dp[j] = dp[j-1]+grid[0][j]
        for i in range(1, rows):
            # First column: can only arrive from above.
            dp[0] += grid[i][0]
            for j in range(1, cols):
                dp[j] = min(dp[j], dp[j-1])+grid[i][j]
        return dp[-1]
if __name__ == "__main__":
    # Quick manual check; the expected output for this grid is 9.
    sample_grid = [[1, 2, 4], [2, 4, 1], [3, 2, 1]]
    print(Solution().minPathSum(sample_grid))
"762521772@qq.com"
] | 762521772@qq.com |
789197ced20ddb38961512fdfd6b70fdaf08dcf2 | c35008ff51c71e03bd4d14fdc3b561d4efe6d25a | /lab/06/lab06_b_d.py | add41c0a8d468dd3a47eaaf85d7f3a38622f81ad | [
"MIT"
] | permissive | reveriess/TarungLabDDP1 | 2300b08b068bb8ffb08e686260008ceca89b5f14 | f2298d462a94dfb89a6c6a88ee4de4fff9acd671 | refs/heads/master | 2020-08-18T02:44:52.371416 | 2019-10-18T08:16:48 | 2019-10-18T08:16:48 | 215,738,559 | 0 | 0 | MIT | 2019-10-18T08:22:39 | 2019-10-17T08:13:40 | Python | UTF-8 | Python | false | false | 1,547 | py | '''
Benny's Plagiarism Filter
Mengecek set-set yang berisi 64-bit hashcode dari file-file submisi tugas
pemrograman dan mencetak output berupa set yang berisi hashcode yang tidak
memiliki duplikat.
'''
set_qty = int(input("Masukkan banyaknya set yang akan diinput: "))
curr_set = set()
print("Untuk {} baris berikutnya, masukkan set yang berisi ".format(set_qty) +
"hashcode yang akan dicek:")
'''
Meminta input berupa set sebanyak set_qty yang telah diinput oleh user.
Untuk setiap set yang diinput, hilangkan spasi untuk menyeragamkan set sehingga
setiap elemen hanya dipisahkan dengan ",". Hilangkan { dan } yang ada di awal
dan akhir input, kemudian buat menjadi set baru a_set dengan setiap elemennya
dipisahkan oleh ", ". Simpan elemen-elemen yang ada di a_set ke dalam
curr_set, dan simpan irisan curr_set dengan a_set berikutnya di intersections.
Setelah semua set masuk ke curr_set, hilangkan elemen-elemen yang ada di
intersections dari curr_set.
'''
intersections = set()
for i in range(set_qty):
a_set = input().replace(' ', '').strip('{}')
a_set = set(a_set.split(','))
if i > 0:
intersections |= curr_set & a_set
curr_set |= a_set
curr_set -= intersections
'''
Menghilangkan string kosong yang mungkin ada di curr_set jika ada tepat satu
set yang diinput yang hanya memiliki tepat satu elemen (tidak ada ", ").
Kemudian menghilangkan tanda ' karena elemen-elemen curr_set masih bertipe
string.
'''
curr_set.discard('')
print(str(curr_set).replace("'", ""))
| [
"laymonage@gmail.com"
] | laymonage@gmail.com |
25ebdff821aece21611aec20575591caec4d23c7 | c740a51903b1af235cc9bdd472f58ee55f796564 | /nlp/nlp_api/views.py | a8b1c003941ae0fe4a46de7ee277478586c0ad56 | [] | no_license | godejiao/nlp | 610a2cfba30c9d4a0dca01b259304890176d00e9 | c7f9acf1648f99701d0f56cd1c5b61f9f0ebc5b1 | refs/heads/master | 2021-01-01T20:14:34.667726 | 2017-10-16T01:53:26 | 2017-10-16T01:53:26 | 98,798,019 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,219 | py | # -*- coding: utf-8 -*-
from django.shortcuts import render, render_to_response
from django.core.exceptions import ObjectDoesNotExist
from django.http import JsonResponse, HttpResponse, StreamingHttpResponse
from django.views.decorators.csrf import csrf_exempt
import json
from api.baidu_api import BaiduNLP
from api.tencent_api import TencentNLP
from api.boson_api import BosonNLPAPI,BOKEYLIST
from api.ltp_api import LtpNLP
from api.snow_api import SnowNLPAPI
from nlp_api.forms import SearchForm, SearchKeyWordsFrom
import requests
class NLP (object):
    """Factory mapping an engine name to its NLP API wrapper class."""

    def __init__(self):
        # Engine name -> wrapper class; instantiated lazily in get_nlp().
        self._nlp_dict = {
            "tencent": TencentNLP,
            "baidu": BaiduNLP,
            "boson": BosonNLPAPI,
            "ltp": LtpNLP,
            "snow": SnowNLPAPI,
        }

    def get_nlp(self, mode="boson", idkey=''):
        """Instantiate and return the wrapper for *mode*, passing the API key *idkey*."""
        engine_cls = self._nlp_dict[mode]
        return engine_cls(idkey)
# nlp=NLP()
# nlp_class = nlp.get_nlp(1)
# Word segmentation and part-of-speech tagging endpoint.
@csrf_exempt
def lexicalAnalysis(request) :
    """POST-only view: run word segmentation / POS tagging on 'text'.

    POST params: text, engine ('tencent'|'baidu'|'boson'|'ltp'|'snow'),
    type, plus boson-only options space_mode, level, t2s,
    special_char_conv. Returns JSON {data, message, status} with status:
    0 ok, 1 non-POST, 2 unexpected error, 3 missing params,
    4 engine not available, 5 unknown engine, 6 upstream API error.
    """
    result = {}
    if request.method == "POST" :
        try :
            text = request.POST.get('text', None).encode('utf-8')
            engine = request.POST.get('engine', None)
            type = request.POST.get('type', None)
            # Boson-specific segmentation options with documented defaults.
            space_mode = int(request.POST.get('space_mode',int(0)))
            oo_level = int(request.POST.get('level',int(3)))
            t2s = int(request.POST.get('t2s',int(0)))
            special_char_conv = int(request.POST.get('special_char_conv',int(0)))
            message = str()
            if text and engine and type:
                nlp = NLP()
                if engine == 'tencent' :
                    data = nlp.get_nlp(engine).lexicalAnalysis(text, type)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'baidu' :
                    data = nlp.get_nlp(engine).lexicalAnalysis(text)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'boson' :
                    # Rotate through the Boson API keys until one is below
                    # its rate limit; 'message' doubles as the loop sentinel.
                    message = limitmessage = 'HTTPError: 429 count limit exceeded'
                    while message == limitmessage :
                        try :
                            data = nlp.get_nlp("boson", BOKEYLIST[0]).wordSeg(text,space_mode,oo_level,t2s,special_char_conv)
                            result['data'] = data
                            result['status'] = 0
                            result['message'] = u'获取成功'
                            message = ''
                        except Exception, e :
                            message = str(e)
                            if message == limitmessage :
                                # Current key exhausted; move it to the back.
                                BOKEYLIST.append(BOKEYLIST.pop(0))
                            elif message :
                                result['message'] = message
                                result['status'] = 6
                                result['data'] = 0
                elif engine == 'ltp' :
                    # LTP segmentation is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                elif engine == 'snow':
                    # SnowNLP segmentation is not enabled yet.
                    text = text.decode('utf-8')
                    result['data'] = ''
                    result['status'] = 4
                    result['message'] = u''
                else :
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)
            result['status'] = 2
    else:
        result['status'] = 1
        result['message'] = u'请求失败'
    return HttpResponse(json.dumps(result))
# Named entity recognition endpoint.
@csrf_exempt
def lexical(request) :
    """POST-only view: run named-entity recognition on 'text'.

    POST params: text, engine, type, sensitivity (boson only).
    Returns JSON {data, message, status}; status codes as in
    lexicalAnalysis (0 ok .. 6 upstream API error).
    """
    result = {}
    if request.method == "POST" :
        try :
            text = request.POST.get('text',None).encode('utf-8')
            engine = request.POST.get('engine',None)
            type = request.POST.get('type',None)
            sensitivity = request.POST.get('sensitivity',None)
            message = str()
            if text and engine and type:
                nlp = NLP()
                if engine == 'tencent' :
                    data = nlp.get_nlp("tencent").lexicalAnalysis(text, type)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'baidu' :
                    data = nlp.get_nlp("baidu").lexicalAnalysis(text)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'boson' :
                    # Rotate through the Boson API keys until one is below
                    # its rate limit; 'message' doubles as the loop sentinel.
                    message = limitmessage = 'HTTPError: 429 count limit exceeded'
                    while message == limitmessage :
                        try :
                            print BOKEYLIST[0]
                            data = nlp.get_nlp("boson", BOKEYLIST[0]).lexicalAnalysis(text,sensitivity = sensitivity)
                            result['data'] = data
                            result['status'] = 0
                            result['message'] = u'获取成功'
                            message = ''
                        except Exception, e :
                            message = str(e)
                            if message == limitmessage :
                                BOKEYLIST.append(BOKEYLIST.pop(0))
                            elif message :
                                result['message'] = message
                                result['status'] = 6
                                result['data'] = 0
                elif engine == 'ltp' :
                    # LTP NER is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                elif engine == 'snow' :
                    # SnowNLP NER is not enabled yet.
                    text = text.decode('utf-8')
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                else:
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)
            result['status'] = 2
    else :
        result['message'] = u'请求失败'
        result['status'] = 1
    return HttpResponse(json.dumps(result))
# Keyword extraction endpoint.
@csrf_exempt
def textKeywords(request) :
    """POST-only view: extract keywords / comment tags from 'text'.

    POST params: text, engine, title (tencent), channel (tencent),
    type (baidu comment-tag category), top_k (boson/snow, default 10).
    Returns JSON {data, message, status}; status codes as in
    lexicalAnalysis (0 ok .. 6 upstream API error).
    """
    result = {}
    if request.method == "POST" :
        text = request.POST.get('text',None).encode('utf-8')
        engine = request.POST.get('engine',None).encode('utf-8')
        title = request.POST.get('title',None).encode('utf-8')
        channel = request.POST.get('channel',None)
        type = request.POST.get('type',None)
        # NOTE(review): when supplied by the client, top_k arrives as a str;
        # the boson branch passes it through unconverted — verify upstream API.
        top_k = request.POST.get('top_k',int(10))
        message = str()
        try :
            if text and engine and title and channel and type :
                nlp = NLP()
                if engine == 'tencent' :
                    data = nlp.get_nlp("tencent").textKeywords(title, text, channel)
                    result['data'] = str(data).encode('utf-8')
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'baidu' :
                    data = nlp.get_nlp("baidu").commentTag(text, int(type))
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'boson' :
                    # Rotate through the Boson API keys until one is below
                    # its rate limit; 'message' doubles as the loop sentinel.
                    message = limitmessage = 'HTTPError: 429 count limit exceeded'
                    while message == limitmessage:
                        try :
                            print BOKEYLIST[0]
                            data = nlp.get_nlp("boson", BOKEYLIST[0]).textKeywords(text, top_k)
                            result['data'] = data
                            result['status'] = 0
                            result['message'] = u'获取成功'
                            message = ''
                        except Exception, e :
                            message = str(e)
                            if message == limitmessage :
                                BOKEYLIST.append(BOKEYLIST.pop(0))
                            elif message:
                                result['message'] = message
                                result['status'] = 6
                                result['data'] = 0
                elif engine == 'ltp' :
                    # LTP keyword extraction is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                elif engine == 'snow':
                    text = text.decode('utf-8')
                    data = nlp.get_nlp(engine).textKeywords(text,int(top_k))
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                else :
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)+message
            result['status'] = 2
    else :
        result['message'] = u'请求失败'
        result['status'] = 1
    return HttpResponse(json.dumps(result))
# Sentiment analysis endpoint.
@csrf_exempt
def textSentiment(request) :
    """POST-only view: run sentiment analysis on 'text'.

    POST params: text, engine, type (tencent), model (boson domain model,
    default 'general'). Returns JSON {data, message, status}; status codes
    as in lexicalAnalysis (0 ok .. 6 upstream API error).
    """
    result = {}
    if request.method == "POST" :
        try :
            text = request.POST.get('text', None).encode('utf-8')
            engine = request.POST.get('engine', None)
            type = request.POST.get('type', None)
            model = request.POST.get('model','general')
            message = str()
            if text and engine and type:
                nlp = NLP()
                if engine == 'tencent' :
                    data = nlp.get_nlp("tencent").textSentiment(text, type)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'baidu' :
                    data = nlp.get_nlp("baidu").textSentiment(text)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'boson' :
                    # Rotate through the Boson API keys until one is below
                    # its rate limit; 'message' doubles as the loop sentinel.
                    message = limitmessage = 'HTTPError: 429 count limit exceeded'
                    while message == limitmessage :
                        try :
                            print BOKEYLIST[0]
                            data = nlp.get_nlp("boson", BOKEYLIST[0]).textSentiment(text,model)
                            result['data'] = data
                            result['status'] = 0
                            result['message'] = u'获取成功'
                            message = ''
                        except Exception, e :
                            message = str(e)
                            if message == limitmessage :
                                BOKEYLIST.append(BOKEYLIST.pop(0))
                            elif message :
                                result['message'] = message
                                result['status'] = 6
                                result['data'] = 0
                elif engine == 'ltp' :
                    # LTP sentiment analysis is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                elif engine == 'snow':
                    text = text.decode('utf-8')
                    result['data'] = nlp.get_nlp('snow').textSentiment(text)
                    result['status'] = 0
                    result['message'] = u'获取成功'
                else :
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)
            result['status'] = 2
    else:
        result['status'] = 1
        result['message'] = u'请求失败'
    return HttpResponse(json.dumps(result))
# Text classification endpoint.
@csrf_exempt
def textClassify(request) :
    """POST-only view: classify 'text' into a category.

    POST params: text, engine. Only 'tencent' and 'boson' are implemented;
    the others report "not enabled". Returns JSON {data, message, status};
    status codes as in lexicalAnalysis (0 ok .. 6 upstream API error).
    """
    result = {}
    if request.method == "POST" :
        try :
            text = request.POST.get('text', None).encode('utf-8')
            engine = request.POST.get('engine', None)
            message = str()
            if text and engine :
                nlp = NLP()
                if engine == 'tencent' :
                    data = nlp.get_nlp(engine).textClassify(text)
                    result['data'] = data
                    result['message'] = u'获取成功'
                    result['status'] = 0
                elif engine == 'baidu' :
                    # Baidu classification is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                elif engine == 'boson' :
                    # Rotate through the Boson API keys until one is below
                    # its rate limit; 'message' doubles as the loop sentinel.
                    message = limitmessage = 'HTTPError: 429 count limit exceeded'
                    while message == limitmessage :
                        try :
                            print BOKEYLIST[0]
                            data = nlp.get_nlp("boson", BOKEYLIST[0]).textClassify(text)
                            result['data'] = data
                            result['status'] = 0
                            result['message'] = u'获取成功'
                            message = ''
                        except Exception, e :
                            message = str(e)
                            if message == limitmessage :
                                BOKEYLIST.append(BOKEYLIST.pop(0))
                            elif message :
                                result['message'] = message
                                result['status'] = 6
                                result['data'] = 0
                elif engine == 'ltp' :
                    # LTP classification is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 4
                elif engine == "snow":
                    # SnowNLP classification is not enabled yet.
                    text = text.decode('utf-8')
                    result['data'] = ''
                    result['status'] =4
                    result['message'] = u''
                else :
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)
            result['status'] = 2
    else:
        result['status'] = 1
        result['message'] = u'请求失败'
    return HttpResponse(json.dumps(result))
# Dependency parsing endpoint.
@csrf_exempt
def textDependency(request) :
    """POST-only view: dependency-parse 'text'.

    POST params: text, engine. Only 'boson' is implemented; the other
    engines report "not enabled". Returns JSON {data, message, status};
    status codes as in lexicalAnalysis (0 ok .. 6 upstream API error).
    """
    result = {}
    if request.method == "POST" :
        try :
            text = request.POST.get('text', None).encode('utf-8')
            engine = request.POST.get('engine', None)
            message = str()
            if text and engine :
                nlp = NLP()
                if engine == 'tencent' :
                    # Tencent dependency parsing is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 1
                elif engine == 'baidu' :
                    # Baidu dependency parsing is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 0
                elif engine == 'boson' :
                    # Rotate through the Boson API keys until one is below
                    # its rate limit; 'message' doubles as the loop sentinel.
                    message = limitmessage = 'HTTPError: 429 count limit exceeded'
                    while message == limitmessage :
                        try :
                            print BOKEYLIST[0]
                            data = nlp.get_nlp("boson", BOKEYLIST[0]).textDependency(text)
                            result['data'] = data
                            result['status'] = 0
                            result['message'] = u'获取成功'
                            message = ''
                        except Exception, e :
                            message = str(e)
                            if message == limitmessage :
                                BOKEYLIST.append(BOKEYLIST.pop(0))
                            elif message :
                                result['message'] = message
                                result['status'] = 6
                                result['data'] = 0
                elif engine == 'ltp' :
                    # LTP dependency parsing is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 1
                elif engine == "snow" :
                    # SnowNLP dependency parsing is not enabled yet.
                    text = text.decode('utf-8')
                    result['data'] = ''
                    result['status'] = 4
                    result['message'] = u''
                else :
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)
            result['status'] = 2
    else :
        result['status'] = 1
        result['message'] = u'请求失败'
    return HttpResponse(json.dumps(result))
# Text summarization endpoint.
@csrf_exempt
def textSummary(request) :
    """POST-only view: summarize 'text'.

    POST params: text, engine, limit_of_boson (fraction in (0, 1], default
    0.3), limit_of_snow (sentence count, default 5). Returns JSON
    {data, message, status}; status codes as in lexicalAnalysis plus
    7 when limit_of_boson is outside (0, 1].
    """
    result = {}
    if request.method == "POST" :
        try :
            text = request.POST.get('text', None).encode('utf-8')
            engine = request.POST.get('engine', None)
            limit_of_boson = float(request.POST.get('limit_of_boson',float(0.3)))
            limit_of_snow = int(request.POST.get('limit_of_snow',int(5)))
            message = str()
            if text and engine :
                nlp = NLP()
                if engine == 'tencent' :
                    # Tencent summarization is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 1
                elif engine == 'baidu' :
                    # Baidu summarization is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 0
                elif engine == 'boson' :
                    # Boson's limit is the fraction of the text to keep.
                    if limit_of_boson <= 1.0 and limit_of_boson > 0.0:
                        # Rotate through the Boson API keys until one is below
                        # its rate limit; 'message' doubles as the loop sentinel.
                        message = limitmessage = 'HTTPError: 429 count limit exceeded'
                        text = text.decode('utf-8')
                        while message == limitmessage :
                            try :
                                data = nlp.get_nlp("boson", BOKEYLIST[0]).textSummary(s = text, limit = limit_of_boson)
                                result['data'] = data
                                result['status'] = 0
                                result['message'] = u'获取成功'
                                message = ''
                            except Exception, e :
                                message = str(e)
                                if message == limitmessage :
                                    BOKEYLIST.append(BOKEYLIST.pop(0))
                                elif message :
                                    result['message'] = message
                                    result['status'] = 6
                                    result['data'] = 0
                    else:
                        result['status'] = 7
                        result['message'] = u'limit不符合要求'
                        result['data'] = ''
                elif engine == 'ltp' :
                    # LTP summarization is not enabled yet.
                    result['data'] = u''
                    result['message'] = u'暂未开通'
                    result['status'] = 1
                elif engine == "snow" :
                    text = text.decode('utf-8')
                    data = nlp.get_nlp(engine).textSummary(text,limit_of_snow)
                    result['data'] = data
                    result['status'] = 0
                    result['message'] = u'获取成功'
                else :
                    result['data'] = ''
                    result['status'] = 5
                    result['message'] = u'引擎请求错误'
            else :
                result['data'] = ''
                result['message'] = u'获取失败'
                result['status'] = 3
        except Exception, e :
            result['data'] = ""
            result['message'] = str(e)
            result['status'] = 2
    else :
        result['status'] = 1
        result['message'] = u'请求失败'
    return HttpResponse(json.dumps(result))
"godejiao@163.com"
] | godejiao@163.com |
87a1dc0038d74e0b17b12059d53dca1a56cf6db7 | e361af8c96db69a05bf1b5c72c22b67506f3e9b7 | /nuget/date_util.py | da425b4b11a2b46d63403164da6ab59f05a21d11 | [] | no_license | wwarren/nuget-package-scanner | 54f5336277e0e9efd7bf519a7bc8cbe55caff40b | 2bd192fa9a2a2265993b388f87d2a60ccf6b26c8 | refs/heads/master | 2022-05-29T16:46:38.890844 | 2020-05-04T13:26:00 | 2020-05-04T13:26:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | import datetime
def get_date_from_iso_string(iso_date_string: str) -> datetime.date:
    """Parse the date portion (YYYY-MM-DD) of an ISO-8601 timestamp string.

    Anything after a 'T' separator is ignored. Returns a
    ``datetime.datetime`` (a ``datetime.date`` subclass) at midnight.
    """
    assert isinstance(iso_date_string, str) and iso_date_string
    date_part, _, _ = iso_date_string.partition('T')
    return datetime.datetime.strptime(date_part, '%Y-%m-%d')
| [
"donnieh@gmail.com"
] | donnieh@gmail.com |
dc79570687b43676257440dd230843ca43cb7599 | 975b245e32457be007114cb87501b8ee1922e1bc | /legacy/boilerPlates/pyxWin32/spyppKakaohTalkintextingpage.py | 29463fabc2e1c9469a714b2ee5c0986a77fe2f14 | [] | no_license | sachim1379/man | cc2a0247484936b802e8170eac090bcba75799ba | b74b81c1c4de1789d19220e744ba8938ac8e2fd9 | refs/heads/main | 2023-08-13T01:03:49.572169 | 2021-10-03T11:52:22 | 2021-10-03T11:52:22 | 389,310,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 884 | py | import time, win32con, win32api, win32gui ##pypiwin32 모듈 필요
#열려있는 대화창의 대화항목을 선택해서 입력한 메세지를 자동전송
#채팅방지정
kakao_opentalk_name = '재호'
def kakao_sendtext(text):
win32api.SendMessage(hwndEdit, win32con.WM_SETTEXT, 0, text)
SendReturn(hwndEdit)
## 엔터
def SendReturn(hwnd):
win32api.PostMessage(hwnd, win32con.WM_KEYDOWN, win32con.VK_RETURN, 0)
time.sleep(0.01)
win32api.PostMessage(hwnd, win32con.WM_KEYUP, win32con.VK_RETURN, 0)
#핸들
hwndMain = win32gui.FindWindow( None, kakao_opentalk_name)
hwndEdit = win32gui.FindWindowEx( hwndMain, None, "RICHEDIT50W", None) ##RichEdit20W는 카톡안 대화텍스트박스의 클래스명
hwndListControl = win32gui.FindWindowEx( hwndMain, None, "EVA_VH_ListControl_Dblclk", None)
##텍스트전송
text = "SETTEXT_test"
kakao_sendtext(text) | [
"sachim1379@gmail.com"
] | sachim1379@gmail.com |
395b9d6b3eeb3dda9279993faf701f3d4c1cf382 | 5aa80aab7a75d76b0aa838bf8f74a276a12c876e | /src/ifmap/SConscript | 91667c2cbadf3ccc91e2f3828066f422d4455f95 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | tungstenfabric/tf-controller | 83b6d58afadb5697b540b5345711a5b2af90d201 | f825fde287f4eb2089aba2225ca73eeab3888040 | refs/heads/master | 2023-08-28T02:56:27.329584 | 2023-08-20T12:15:38 | 2023-08-20T12:31:34 | 231,070,970 | 55 | 29 | Apache-2.0 | 2023-07-23T01:38:17 | 2019-12-31T10:24:38 | C++ | UTF-8 | Python | false | false | 4,531 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
# -*- mode: python; -*-
Import('BuildEnv')
env = BuildEnv.Clone()
env.CppDisableExceptions()
env.Append(CPPPATH = env['TOP'])
env.Append(CPPPATH = [env['TOP'] + '/ifmap'])
env.Append(CPPPATH = [env['TOP'] + '/config-client-mgr'])
env.Append(CPPPATH = [env['TOP'] + '/base/sandesh'])
env.Append(CPPPATH = [env['TOP'] + '/database'])
env.Prepend(LIBS=['sandesh', 'http', 'http_parser', 'curl', 'io', 'base'])
except_env = BuildEnv.Clone()
except_env.Append(CPPPATH = [env['TOP'] + '/ifmap'])
except_env.Append(CPPPATH = [env['TOP'] + '/config-client-mgr'])
except_env.Append(CPPPATH = [env['TOP'] + '/base/sandesh'])
except_env.CppEnableExceptions()
except_env.Append(CPPPATH = env['TOP'])
SandeshGenFiles = env.SandeshGenCpp('ifmap_server_show.sandesh')
SandeshGenFiles += env.SandeshGenOnlyCpp('ifmap_server_show_internal.sandesh')
SandeshGenFiles += env.SandeshGenCpp('ifmap_log.sandesh')
SandeshGenSrcs = env.ExtractCpp(SandeshGenFiles)
sandesh_objs = []
for src in SandeshGenSrcs:
objname = src.replace('.cpp', '.o')
obj = except_env.Object(objname, src)
sandesh_objs.append(obj)
ifmap_server = except_env.Object('ifmap_server.o', 'ifmap_server.cc')
ifmap_server_show = except_env.Object('ifmap_server_show.o', 'ifmap_server_show.cc')
ifmap_xmpp = except_env.Object('ifmap_xmpp.o', 'ifmap_xmpp.cc')
ifmap_xmpp_client_show = except_env.Object('ifmap_xmpp_client_show.o', 'ifmap_xmpp_client_show.cc')
AgentSandeshGenFiles = env.SandeshGenCpp('ifmap_agent.sandesh')
AgentSandeshGenSrcs = env.ExtractCpp(AgentSandeshGenFiles)
libifmap_common = env.Library('ifmap_common',
['ifmap_dependency_tracker.cc',
'ifmap_table.cc',
'ifmap_link.cc',
'ifmap_link_table.cc',
'ifmap_node.cc',
'ifmap_object.cc',
'ifmap_log.cc'] + sandesh_objs)
# control-node
libifmap = env.Library('ifmap_server',
['ifmap_client.cc',
'ifmap_config_listener.cc',
'ifmap_encoder.cc',
'ifmap_exporter.cc',
'ifmap_factory.cc',
'ifmap_graph_walker.cc',
'ifmap_node_proxy.cc',
ifmap_server_show,
ifmap_server,
'ifmap_server_parser.cc',
'ifmap_server_table.cc',
'ifmap_update.cc',
'ifmap_update_queue.cc',
'ifmap_update_sender.cc',
'ifmap_util.cc',
'ifmap_uuid_mapper.cc',
ifmap_xmpp,
ifmap_xmpp_client_show,
] + sandesh_objs)
# agent-module
libifmap_agent = env.Library('ifmap_agent',
[ 'ifmap_agent_parser.cc',
'ifmap_agent_table.cc',
'ifmap_agent_sandesh.cc',
] + AgentSandeshGenSrcs)
test_suite = env.SConscript('client/SConscript', exports='BuildEnv',
duplicate = 0)
test_suite += env.SConscript('test/SConscript', exports='BuildEnv',
duplicate = 0)
def code_coverage(target, source, env):
    # SCons builder action: reset lcov counters, run every binary in the
    # module-level test_suite (logging each run), then generate an HTML
    # coverage report into target[0].
    import shutil
    shutil.rmtree(target[0].path, ignore_errors = True)
    # lcov --base-directory $ROOT -- directory . --zerocounters -q
    import os
    os.system('lcov --base-directory . --directory ' + Dir('.').path +
              ' --zerocounters -q')
    # execute tests with the build/lib shared libraries on the loader path
    import subprocess
    ShEnv = {env['ENV_SHLIB_PATH']: 'build/lib'}
    for test in test_suite:
        cmd = test[0].path
        logfile = open(cmd + '.log', 'w')
        subprocess.call([cmd], stdout=logfile, env=ShEnv)
    # lcov --base-directory $ROOT -- directory . -c -o ifmap_test.info
    os.system('lcov --base-directory . --directory ' + Dir('.').path +
              ' -c -o ifmap_test.info')
    # genhtml -o ifmap/test_coverage ifmap_test.info
    os.system('genhtml -o ' + target[0].path +
              ' -t "test coverage" --num-spaces 4 ifmap_test.info')
if env['OPT'] == 'coverage':
test_coverage = env.Command(Dir('test_coverage'), '', code_coverage)
env.AlwaysBuild(test_coverage)
env.Alias('src/ifmap:test_coverage', test_coverage)
| [
"andrey-mp@yandex.ru"
] | andrey-mp@yandex.ru | |
1729a45d5d706db63e49b78c57f9d9916f6030f6 | 65f9c08c1049ad88d7d972aa63a2b9b7b39dc3fa | /tests.py | 16fa273be167d333b893cef1aad92af143937522 | [] | no_license | ulturt/currency_converter | 496c03ad5695f8feb3bfad9e3ac86e99da0d73cb | f84d08aab62ee3f3b5f0b06fad23b26b6d883a28 | refs/heads/master | 2020-04-26T09:08:27.976497 | 2019-03-03T17:09:53 | 2019-03-03T17:09:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | import unittest
from unittest import mock
from client import get_current_exchange_rate
from converter import convert_value
TEST_EXCHANGE_RATE = 10
class ConverterTestCase(unittest.TestCase):
    """Tests for convert_value() and get_current_exchange_rate()."""

    @mock.patch('converter.get_current_exchange_rate', return_value=TEST_EXCHANGE_RATE)
    def test_convert_value_has_right_input_value(self, _):
        # With the rate mocked to 10, converting 1 unit yields the rate itself.
        self.assertEqual(convert_value(1), TEST_EXCHANGE_RATE)

    @mock.patch('converter.get_current_exchange_rate', return_value=None)
    def test_convert_value_has_not_exchange_rate(self, _):
        # When no rate is available, conversion returns None instead of raising.
        self.assertIsNone(convert_value(1))

    def test_get_current_exchange_rate(self):
        # NOTE(review): this hits the real rate service (network-dependent) —
        # confirm this is intended for the test suite.
        self.assertIsNotNone(get_current_exchange_rate())
if __name__ == '__main__':
unittest.main()
| [
"luzgin.st@gmail.com"
] | luzgin.st@gmail.com |
d0b9760362c2c896a11394121621237ba007551c | 64afcac06e3a64215d7d7152c4fa5662164a41e6 | /src/jk_sql/DBColDef.py | e36c558919fedb53a78002e8dc3f70873f63d78a | [
"Apache-2.0"
] | permissive | jkpubsrc/python-module-jk-sql | 7b4f12783b8384540404fa60c469c911955202a6 | cc716f4042af4cbc503056bd3f71cde9acd12ce2 | refs/heads/master | 2022-09-15T17:20:24.458796 | 2017-10-20T10:01:34 | 2017-10-20T10:01:34 | 107,655,550 | 0 | 1 | null | 2022-09-03T19:29:01 | 2017-10-20T08:58:04 | Python | UTF-8 | Python | false | false | 2,376 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import sqlite3
from .EnumDBColType import EnumDBColType
from .EnumDBIndexType import EnumDBIndexType
#
# This class represents a definition of a column. Objects of this type are used to either define a column or get information about a table column.
#
class DBColDef(object):
def __init__(self, fieldName, fieldType, bIsNullable, indexType):
assert isinstance(fieldName, str)
assert isinstance(fieldType, EnumDBColType)
assert isinstance(bIsNullable, bool)
assert isinstance(indexType, EnumDBIndexType)
if fieldType == EnumDBColType.PK:
bIsNullable = False
indexType = EnumDBIndexType.NONE
self.__name = fieldName
self.__type = fieldType
self.__bIsNullable = bIsNullable
self.__indexType = indexType
#
@property
def index(self):
return self.__indexType
#
@property
def nullable(self):
return self.__bIsNullable
#
@property
def unique(self):
return self.__indexType == EnumDBIndexType.UNIQUE_INDEX
#
@property
def type(self):
return self.__type
#
@property
def name(self):
return self.__name
#
def isEqualWithoutIndex(self, other):
return (self.__name == other.name) and (self.__type == other.type) and (self.__bIsNullable == other.nullable)
#
def __ne__(self, other):
return (self.__name != other.name) or (self.__type != other.type) or (self.__bIsNullable != other.nullable) or (self.__indexType != other.index)
#
def __eq__(self, other):
return (self.__name == other.name) and (self.__type == other.type) and (self.__bIsNullable == other.nullable) and (self.__indexType == other.index)
#
def __str__(self):
return self.__type + ": " + self.__name
#
def __repr__(self):
return self.__type + ": " + self.__name
#
def __copy__(self):
return DBColDef(self.__name, self.__type, self.__bIsNullable, self.__indexType)
#
def __deepcopy__(self, memo):
return DBColDef(self.__name, self.__type, self.__bIsNullable, self.__indexType)
#
@staticmethod
def loadFromJSON(jsonDef):
t = jsonDef["type"]
i = jsonDef["index"]
return DBColDef(jsonDef["name"], EnumDBColType.parse(t), jsonDef["nullable"], EnumDBIndexType.parse(i))
#
def toJSON(self):
return {
"name" : self.__name,
"type" : str(self.__type),
"nullable" : self.__bIsNullable,
"index" : str(self.__indexType)
}
#
#
| [
"pubsrc@binary-overflow.de"
] | pubsrc@binary-overflow.de |
3ee27f9d3949d61696851f7060da197716ec0708 | ddcdf4447f0e4fd2be9644c41ea88c313771034f | /ref_utils/ulti.py | 4dee460b6a1fecdfdb4a1b4eb245faf50cc15830 | [] | no_license | htzheng/ECCV2018_CrossNet_RefSR | c7048b34c79c28380df6c4c685c185c11c96720a | 03959f2dd45b09820115075afd83af50fe8d0814 | refs/heads/master | 2022-08-10T06:41:27.811265 | 2022-08-02T05:42:29 | 2022-08-02T05:42:29 | 142,181,203 | 97 | 24 | null | null | null | null | UTF-8 | Python | false | false | 10,779 | py | from six.moves import cPickle
import random
import numpy as np
import cv2
from scipy.signal import convolve2d,correlate2d
import matplotlib.pyplot as plt
import math
def psnr(img1, img2):
mse = np.mean( (img1 - img2) ** 2 )
if mse == 0:
return 100
PIXEL_MAX = 1.0
return 20 * math.log10(PIXEL_MAX / math.sqrt(mse))
# def psnr_with_mask_bidir(img1, img2, mask):
def psnr_with_mask(img1, img2, mask):
mask_rep = np.repeat(mask, img1.shape[1], axis = 1)
mse = np.sum( (img1 - img2) ** 2 * mask_rep ) / np.sum(mask_rep)
if mse == 0:
psnr_ = 100
else:
PIXEL_MAX = 1.0
psnr_ = 20 * math.log10(PIXEL_MAX / math.sqrt(mse))
mask_ratio = np.mean(mask)
return psnr_, mask_ratio
def epe(flow1, flow2):
mse = np.abs(flow1 - flow2)
return np.mean(mse)
def crop_function(arr, crop_size_H=1, crop_size_W=1):
W = arr.shape[3]
W_surround = (W - crop_size_W)/2
H = arr.shape[2]
H_surround = (H - crop_size_H)/2
output = arr[:,:,H_surround:-H_surround,W_surround:-W_surround]
return output
def my_imshow(image):
if image.shape[1] == 3:
plt.imshow(np.squeeze(np.transpose(image, axes=(0,2,3,1))), vmin=-0, vmax=1, interpolation="nearest")
elif image.shape[1] == 1:
plt.imshow(np.squeeze(np.transpose(image, axes=(0,2,3,1))), vmin=-0, vmax=1, interpolation="nearest", cmap='Greys')
def grayscale(img):
dst = np.zeros((img.shape[0],1,img.shape[2],img.shape[3]), dtype=np.float32)
dst[:,0,:,:] = 0.299 * img[:,0,:,:] + 0.587 * img[:,1,:,:] + 0.114 * img[:,2,:,:]
dst = np.repeat(dst, 3, axis = 1)
return dst
def blend(img1,img2,alpha = 0.5):
# if alpha > 1.5:
# alpha = 1.0
# if alpha < 0.5:
# alpha = 0.5
return img1 * alpha + img2 * (1-alpha)
def normalize_image_LF(img):
# normalize image such that every pixels follow normal distribution (the range of the original image is [0,1])
# for ImageNet
# imagenet_mean = np.array([0.485, 0.456, 0.406], dtype=np.float32).reshape((3,1,1))
# imagenet_var = np.array([0.229, 0.224, 0.225], dtype=np.float32).reshape((3,1,1))
# for LF dataset
imagenet_mean = np.array([0.179, 0.179, 0.179], dtype=np.float32).reshape((3,1,1))
imagenet_var = np.array([0.217, 0.217, 0.217], dtype=np.float32).reshape((3,1,1))
img_new = (img - imagenet_mean) / imagenet_var
# use alex net normalization
return img_new
def inverse_normalize_image_LF(img):
# for ImageNet
# imagenet_mean = np.array([0.485, 0.456, 0.406], dtype=np.float32).reshape((3,1,1))
# imagenet_var = np.array([0.229, 0.224, 0.225], dtype=np.float32).reshape((3,1,1))
# for LF dataset
imagenet_mean = np.array([0.179, 0.179, 0.179], dtype=np.float32).reshape((3,1,1))
imagenet_var = np.array([0.217, 0.217, 0.217], dtype=np.float32).reshape((3,1,1))
img_new = img*imagenet_var + imagenet_mean
# use alex net normalization
return img_new
def upsampling(img=None ,scale = 2):
img = np.asarray(img*255.0,dtype = np.uint8)
W = img.shape[3]*scale
H = img.shape[2]*scale
img_out = np.zeros((img.shape[0],img.shape[1],H,W),dtype=np.uint8)
if(img.ndim == 4):
img_out[:,0,:,:] = cv2.resize(img[0,0,:,:],(W,H),interpolation=cv2.INTER_CUBIC)
img_out[:,1,:,:] = cv2.resize(img[0,1,:,:],(W,H),interpolation=cv2.INTER_CUBIC)
img_out[:,2,:,:] = cv2.resize(img[0,2,:,:],(W,H),interpolation=cv2.INTER_CUBIC)
img_out = np.asarray(img_out,dtype = np.float32) / 255.0
# print img_out
return img_out
def load_model(T_param,filename):
f_l = open(filename, 'rb')
params_l = cPickle.load(f_l)
f_l.close()
for ind, p in enumerate(params_l):
T_param[ind].set_value(p.get_value())
# def load_params(T_param,filename):
# f_l = open(filename, 'rb')
# params_l = cPickle.load(f_l)
# f_l.close()
# for ind, p in enumerate(params_l):
# T_param[ind].set_value(p.get_value())
# def save_params(T_param,filename):
# f_params = open(filename, 'wb')
# cPickle.dump(T_param, f_params, protocol=cPickle.HIGHEST_PROTOCOL)
# f_params.close()
def yes_no_promt(str):
Join = raw_input(str)
while not(Join == 'y' or Join =='n'):
Join = raw_input(str)
return Join == 'y'
def load_params(T_params,filename): # save list of numpy array
f_l = open(filename, 'rb')
params_l = cPickle.load(f_l)
f_l.close()
# print params_l
# print T_params
for ind in range(len(params_l)):
print params_l[ind].shape, T_params[ind].get_value().shape
print len(T_params), len(params_l)
for ind in range(len(params_l)):
T_params[ind].set_value(params_l[ind])
# for ind, p in enumerate(T_params):
# p.set_value(params_l[ind])
def save_params(T_params,filename):
numpy_list = [p.get_value() for p in T_params]
f_params = open(filename, 'wb')
cPickle.dump(numpy_list, f_params, protocol=cPickle.HIGHEST_PROTOCOL)
f_params.close()
def load_update(T_updates,filename): # save list of numpy array
f_l = open(filename, 'rb')
params_l = cPickle.load(f_l)
f_l.close()
for ind, p in enumerate(T_updates.keys()):
p.set_value(params_l[ind])
def save_update(T_updates,filename):
numpy_list = [p.get_value() for p in T_updates.keys()]
f_params = open(filename, 'wb')
cPickle.dump(numpy_list, f_params, protocol=cPickle.HIGHEST_PROTOCOL)
f_params.close()
def save_snapshot(T_params,T_updates,filename):
save_params(T_params, filename+'.params')
save_update(T_updates, filename+'.updates')
def load_snapshot(T_params,T_updates,filename):
load_params(T_params, filename+'.params')
load_update(T_updates, filename+'.updates')
def augment_data(input_, mirror_left_right = True, rotate_90_time = 0):
if mirror_left_right:
# input_.flip(axis=3)
input_ = np.flip(input_,axis=3)
if rotate_90_time!=0:
# input_.rot90(input_, k=1, axes=(2, 3))
input_ = np.rot90(input_, k=1, axes=(2, 3))
return input_
def _load_img(filename):
img = Image.open(filename)
img = np.asarray(img, dtype=np.float32)
img = img / 255.0
# s = img.shape
# if (img.ndim==3 and s[2]==3):
# y,u,v = _ycc(img[:,:,0],img[:,:,1],img[:,:,2])
# img = np.array(y, dtype=np.float32)
return img
def selective_kernel(N,H,W):
# compute a 4D kernel which rearanges features
kernel = np.zeros((N*H*W,N,H,W),dtype = np.float32)
count = 0;
for n in range(N):
for h in range(H):
for w in range(W):
kernel[count,n,h,w] = 1.0
count = count + 1
return kernel
import matplotlib.pyplot as plt
def bilinear_kernel(size,num_kernels):
factor = (size + 1) // 2
if size % 2 == 1:
center = factor - 1.0
else:
center = factor - 0.5
og = np.ogrid[:size, :size]
K = np.asarray((1 - abs(og[0] - center) / factor) * (1 - abs(og[1] - center) / factor),dtype=np.float32)
print size, factor,center
Ks = np.zeros((num_kernels,num_kernels,size,size),dtype=np.float32)
for i in range(num_kernels):
Ks[i,i,:,:] = K
# normalize
# plt.imshow(K, interpolation="nearest")
# plt.show()
# print Ks,np.sum(K)
Ks=Ks/np.sum(K)
# print Ks
# exit()
return Ks
def gradient_kernel():
# compute a 4D kernel (4,1,5,5) which compute the gradient and second gradient
unit_temp = np.asarray([[0,0,0,0,0],
[0,0,0,0,0],
[0,0,1,0,0],
[0,0,0,0,0],
[0,0,0,0,0]], dtype = np.float32)
unit_temp2 = np.asarray([[0,0,0,0,0],
[0,1,1,1,0],
[0,1,1,1,0],
[0,1,1,1,0],
[0,0,0,0,0]], dtype = np.float32)/9.0
first_grad_temp = np.asarray([[0,0,0,0,0],
[0,0,1,0,0],
[0,0,0,0,0],
[0,0,-1,0,0],
[0,0,0,0,0]], dtype = np.float32)
second_grad_temp = np.asarray([[0,0, 1, 0,0],
[0,0, 0, 0,0],
[0,0,-2, 0,0],
[0,0, 0, 0,0],
[0,0, 1, 0,0]], dtype = np.float32)
kernel = np.zeros((4,1,5,5),dtype = np.float32)
kernel[0,0,:,:] = first_grad_temp
kernel[1,0,:,:] = first_grad_temp.T
kernel[2,0,:,:] = second_grad_temp
kernel[3,0,:,:] = second_grad_temp.T
# kernel[0,0,:,:] = unit_temp2
# kernel[1,0,:,:] = unit_temp2
# kernel[2,0,:,:] = unit_temp2
# kernel[3,0,:,:] = unit_temp2
return kernel
def identity_kernel(size,num_kernels):
# use identity kernel to initialize the delated network (suggested by the paper)
Ks = np.zeros((num_kernels,num_kernels,size,size),dtype=np.float32)
for i in range(num_kernels):
Ks[i,i,size/2,size/2] = 1.0
return Ks
def retify_image(img):
Img_res = img
Img_res = np.select([Img_res>1.0,Img_res<=1.0],[np.ones_like(Img_res), Img_res])
Img_res = np.select([Img_res<0.0,Img_res>=0.0],[np.zeros_like(Img_res), Img_res])
return Img_res
def compute_gradient_feature(patch,conv_mode = 'valid'):
# compute a 4D kernel (4,1,5,5) which compute the gradient and second gradient
unit_temp = np.asarray([[0,0,0,0,0],
[0,0,0,0,0],
[0,0,1,0,0],
[0,0,0,0,0],
[0,0,0,0,0]], dtype = np.float32)
unit_temp2 = np.asarray([[0,0,0,0,0],
[0,1,1,1,0],
[0,1,1,1,0],
[0,1,1,1,0],
[0,0,0,0,0]], dtype = np.float32)/9.0
first_grad_temp = np.asarray([[0,0,0,0,0],
[0,0,1,0,0],
[0,0,0,0,0],
[0,0,-1,0,0],
[0,0,0,0,0]], dtype = np.float32)
second_grad_temp = np.asarray([[0,0, 1, 0,0],
[0,0, 0, 0,0],
[0,0,-2, 0,0],
[0,0, 0, 0,0],
[0,0, 1, 0,0]], dtype = np.float32)
mode = conv_mode #'same' #'valid'
feat1 = correlate2d(patch,first_grad_temp, mode = mode)
feat2 = correlate2d(patch,first_grad_temp.T, mode = mode)
feat3 = correlate2d(patch,second_grad_temp, mode = mode)
feat4 = correlate2d(patch,second_grad_temp.T, mode = mode)
return feat1, feat2, feat3, feat4
| [
"noreply@github.com"
] | noreply@github.com |
87de427c3452fe0c23f480422f6330d34d52e58e | 14f8a0df9618688a84a73409fcb9c01bba4715c7 | /CBIR/src/fusion.py | 10863fba053ece9c0e42ba3b7497b6a820bdbe1c | [] | no_license | MaelleH/analyse-image | 2b8fd9605a556e9a30f0e8b9b6af90c2a73842fb | 02bc220c55743844b862e5a56b7a841342a32cb6 | refs/heads/master | 2023-02-22T15:56:31.420170 | 2021-01-30T18:38:52 | 2021-01-30T18:38:52 | 329,867,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,324 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
from evaluate import my_evaluate_class
from DB import Database
from color import Color
from edge import Edge
from gabor import Gabor
import numpy as np
import itertools
import os
d_type = 'd1'
depth = 30
#Ci-dessous, les différents modules recodés permettant de tester la fusion
feat_pools = ['color', 'edge'] #'gabor', 'daisy', 'hog', 'vgg', 'res']
# result dir
result_dir = 'result'
if not os.path.exists(result_dir):
os.makedirs(result_dir)
#Classe permettant de fusionner les classes choisies pour le traitement
class FeatureFusion(object):
def __init__(self, features):
assert len(features) > 1, "need to fuse more than one feature!"
self.features = features
self.samples = None
self.testSamples = None
def make_samples(self, db, db_name, verbose=True):
if verbose:
print("Use features {}".format(" & ".join(self.features)))
# Ici, un if a été ajouté afin de permettre aux deux bases de créer le sample. En effet, avant, une seule base était gardée en mémoire ce qui pouvait donner aux tests les valeurs de train.
if db_name == "train":
if self.samples is None:
feats = []
for f_class in self.features:
#Ajout de la classe dans les feats
feats.append(self._get_feat(db, db_name, f_class))
samples = self._concat_feat(db, feats)
self.samples = samples # cache the result
return self.samples
else:
if self.testSamples is None:
feats = []
for f_class in self.features:
feats.append(self._get_feat(db, db_name, f_class))
testSamples = self._concat_feat(db, feats)
self.testSamples = testSamples # cache the result
return self.testSamples
#fonction permettant de récuperer la classe en fonction du nom, suppression de gabor, hog, vgg, res et daisy car ils n'ont pas été adpatés
def _get_feat(self, db,db_name, f_class):
if f_class == 'color':
f_c = Color()
# elif f_class == 'daisy':
# f_c = Daisy()
elif f_class == 'edge':
f_c = Edge()
""" elif f_class == 'gabor':
f_c = Gabor()
elif f_class == 'hog':
f_c = HOG()
elif f_class == 'vgg':
f_c = VGGNetFeat()
elif f_class == 'res':
f_c = ResNetFeat()"""
return f_c.make_samples(db, db_name, verbose=False)
#Cette fonction permet d'ajouter les différentes classes dans feat et de mélanger leurs données
def _concat_feat(self, db, feats):
samples = feats[0]
delete_idx = []
for idx in range(len(samples)):
for feat in feats[1:]:
feat = self._to_dict(feat)
key = samples[idx]['img']
if key not in feat:
delete_idx.append(idx)
continue
assert feat[key]['cls'] == samples[idx]['cls']
samples[idx]['hist'] = np.append(samples[idx]['hist'], feat[key]['hist'])
for d_idx in sorted(set(delete_idx), reverse=True):
del samples[d_idx]
if delete_idx != []:
print("Ignore %d samples" % len(set(delete_idx)))
return samples
def _to_dict(self, feat):
ret = {}
for f in feat:
ret[f['img']] = {
'cls': f['cls'],
'hist': f['hist']
}
return ret
#cette fonction permet de decider à quelle classe appartient l'image
def evaluate_feats(db1, db2, N, feat_pools=feat_pools, d_type='d1', depths=[99, 50, 30, 10, 5, 3, 1]):
result = open(os.path.join(result_dir, 'feature_fusion-{}-{}feats.csv'.format(d_type, N)), 'w')
for i in range(N):
result.write("feat{},".format(i))
result.write("depth,distance,MMAP")
combinations = itertools.combinations(feat_pools, N)
for combination in combinations:
sommeBonnesReponsesCombinaison = 0
fusion = FeatureFusion(features=list(combination))
for d in depths:
#On récupère la prévision de classe dans la variable "prévision"
APs, prevision = my_evaluate_class(db1, db2, f_instance=fusion, d_type=d_type, depth=d)
sommeBonnesReponses = 0
for i in range(0, len(db_test)):
#print("Prevision {}, {}".format(db_test.data.img[i], prevision[i]))
if prevision[i] in db_test.data.img[i]: # Ayant trié les données de tests, je suis en mesure de savoir si mon modèle récupère la bonne réponses. Avec les données rentrées, la moyenne est de 78%
sommeBonnesReponses += 1
print("Moyennes bonnes réponses = {}".format(sommeBonnesReponses / len(db_test) * 100))
sommeBonnesReponsesCombinaison += sommeBonnesReponses / len(db_test)
cls_MAPs = []
for cls, cls_APs in APs.items():
MAP = np.mean(cls_APs)
cls_MAPs.append(MAP)
r = "{},{},{},{}".format(",".join(combination), d, d_type, np.mean(cls_MAPs))
print(r)
result.write('\n' + r)
print("Moyennes {} bonnes réponses tout depth= {}".format(",".join(combination), sommeBonnesReponsesCombinaison/len(depths) * 100))
result.close()
if __name__ == "__main__":
# On crée les deux bases, celle de test et celle de train
DB_train_dir_param = "../../ReseauDeNeurones/data/train"
DB_train_csv_param = "database/data_train.csv"
db_train = Database(DB_train_dir_param, DB_train_csv_param)
DB_test_dir_param = "../../ReseauDeNeurones/data/test_classés"
DB_test_csv_param = "database/data_test.csv"
db_test = Database(DB_test_dir_param, DB_test_csv_param)
# evaluate features double-wise
evaluate_feats(db_train, db_test, N=2, d_type='d1')
# evaluate features triple-wise
evaluate_feats(db_train,db_test, N=3, d_type='d1')
# evaluate features quadra-wise
evaluate_feats(db_train,db_test, N=4, d_type='d1')
# evaluate features penta-wise
evaluate_feats(db_train,db_test, N=5, d_type='d1')
# evaluate features hexa-wise
evaluate_feats(db_train,db_test, N=6, d_type='d1')
# evaluate features hepta-wise
evaluate_feats(db_train,db_test, N=7, d_type='d1')
# evaluate database
#featurefusion permet de mixer les classes pour améliorer le résultat final
fusion = FeatureFusion(features=['color', 'edge'])
#On donne a my evaluate f_instance qui est constitué des classes choisies
APs, prevision = my_evaluate_class(db_train, db_test, f_instance=fusion, d_type=d_type, depth=depth)
cls_MAPs = []
sommeBonnesReponses = 0
for i in range(0, len(db_test)):
print("Prevision {}, {}".format(db_test.data.img[i], prevision[i]))
if prevision[i] in db_test.data.img[i]: # Ayant trié les données de tests, je suis en mesure de savoir si mon modèle récupère la bonne réponses. Avec les données rentrées, la moyenne est de 78%
sommeBonnesReponses += 1
print("Moyennes bonnes réponses = {}".format(sommeBonnesReponses / len(db_test) * 100))
| [
"h.maelle.98@gmail.com"
] | h.maelle.98@gmail.com |
27a85b7a3b119d89d6fa012b492732d213266d64 | 1f1aed4bd2d752155759fec8510398c6756d9d57 | /main.py | 94e6a24a3da9ff334dc1e515e8f9db4ba4158dde | [] | no_license | lucaslulyeho/amazon-price-tracker | deb9a0a0fe0cf86a0118e656e0c5ef0d3059abaa | 03b8a180ace38c70507a5cd7d371f006cfd09055 | refs/heads/main | 2023-02-19T15:51:57.196806 | 2021-01-24T11:42:39 | 2021-01-24T11:42:39 | 332,434,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | import requests
import smtplib
from bs4 import BeautifulSoup
URL = input("Paste the amazon url of your item of choice: ")
price_cap = input("Enter your price cap for this item: ")
headers = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36",
"Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8,la;q=0.7"
}
response = requests.get(URL, headers=headers)
soup = BeautifulSoup(response.content, "html.parser")
item = soup.find(name="span", id="priceblock_ourprice")
# item = soup.find(name="span", id="priceblock_saleprice")
price = item.text
current_price = float(price.split("$")[1])
product_title = soup.find(id="productTitle")
product_title = product_title.text.strip()
if current_price <= float(price_cap):
sender_email = "******"
password = "*****"
recipient_email = "********"
with smtplib.SMTP("smtp.gmail.com") as connection:
connection.starttls()
connection.login(user=sender_email, password=password)
connection.sendmail(from_addr=sender_email,
to_addrs=f"{recipient_email}",
msg=f"Subject:Amazon price alert!\n\n{product_title} is now at ${current_price}!"
)
else:
print("Wait till next day")
| [
"noreply@github.com"
] | noreply@github.com |
a8aa7533e72b50c0243f8c3017f7007ef171f0b8 | f08c9a37b4fda0791a8eb43090b9c90d44557ef6 | /tcpproxy.py | 25c706fbde3608ee3bbc3c18ee4574efd54d7e43 | [] | no_license | yue123161/ImprovedIUPTIS | a2617934a5b9168ba0d08c5b108190fa261079ce | dc32bb474ad9ed9603bc9fa43907fc7d69ef65a1 | refs/heads/master | 2022-03-12T12:36:42.055521 | 2019-08-20T11:56:37 | 2019-08-20T11:56:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,721 | py | # MIT License
#
# Copyright (c) 2019 Mariano Di Martino
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# **********************************************************************************
# Realistically Fingerprinting Social Media Webpages in HTTPS Traffic
# Hasselt University/EDM/Flanders Make.
# Paper published by ACM ICPS, ARES 2019.
# Authors: Mariano Di Martino, Peter Quax, Wim Lamotte.
# Please cite the paper if you are using this source code.
# Licensed under: MIT License
# *****************************************************************************************
import os
import time
import sys
import socket
import threading
import select
import struct
from enum import Enum
from threading import Thread, Lock
# Global trace of observed TLS Application Data records, keyed by a
# per-connection identifier ("<ip>_<port>"); each value is a list of
# [length, direction, timestamp] entries (see addTLSRecord).
allTLS = {}
# Guards every access to allTLS; records are added from multiple proxy threads.
mutex = Lock()
def addTLSRecord(id, tlsLen, direction, timestamp):
    """Append one observed TLS record to the global per-connection trace.

    id        -- unique connection identifier ("<ip>_<port>").
    tlsLen    -- TLS record payload length in bytes; a fixed overhead
                 estimate is subtracted before storing.
    direction -- 1 for client->server, -1 for server->client (see callers).
    timestamp -- capture time in seconds, as returned by time.time().
    """
    sslOverhead = 24  # estimated per-record TLS overhead subtracted from the length
    global allTLS
    # Use the lock as a context manager so it is released even if an
    # exception occurs; the original acquire()/release() pair could
    # leave the lock held forever on an error in between.
    with mutex:
        allTLS.setdefault(id, []).append([tlsLen - sslOverhead, direction, timestamp])
def clearRecords():
    """Drop every recorded TLS trace (used between capture sessions)."""
    global allTLS
    # Context manager guarantees the lock is released on any exit path.
    with mutex:
        allTLS = {}
# Write all TLS records to tls_output.txt
def writeRecords():
    """Dump every recorded TLS record to tls_output.txt.

    Output format: each connection starts with a "0 0 0" separator line,
    followed by one line per record:
    "<timestamp_microseconds> <length> <direction>".
    """
    with mutex:
        # Remove a stale output file if present.  Only swallow OS-level
        # errors (file missing, permissions); the original bare "except:"
        # hid every possible exception.
        try:
            os.remove("tls_output.txt")
        except OSError:
            pass
        # "with" closes the file even if a write raises; the original
        # explicit f.close() would be skipped on an exception, leaking
        # the handle and keeping the mutex semantics fragile.
        with open("tls_output.txt", "w") as f:
            for sockname in allTLS:
                conn = allTLS[sockname]
                f.write("0 0 0\n")
                for rec in conn:
                    f.write(str(int(rec[2] * 1000000)) + " " + str(rec[0]) + " " + str(rec[1]) + "\n")
# Communication thread with ImprovedIUPTIS_COLLECT.py
def communicate():
    """Control-channel server for the collection script.

    Listens on 127.0.0.1:82 and serves single-byte commands:
      0x01 -> write all recorded TLS records to tls_output.txt
      0x02 -> clear all recorded TLS records
    Every accepted connection and every handled command is acknowledged
    with a 0xff byte.  After one command is handled the inner loop breaks,
    so control returns to accept() and the controller reconnects per command.
    An unknown command or a closed control connection terminates the whole
    process via exit(1).
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Any non-zero value enables SO_KEEPALIVE; the 120 is not an interval here.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 120)
    sock.bind(('127.0.0.1', 82))
    sock.listen(1)
    while True:
        client, address = sock.accept()
        # Initial 0xff signals readiness to the controller.
        client.send(b"\xff")
        print("Communication connection is accepted.")
        while (True):
            data = client.recv(1)
            if (not data):
                # Controller closed the connection: abort the whole proxy.
                print("WARNING: Communication connection is closed by client.")
                exit(1)
                continue  # NOTE: unreachable -- exit(1) raises SystemExit first
            else:
                if (data == b"\x01"):
                    writeRecords()
                    client.send(b"\xff")
                elif (data == b"\x02"):
                    clearRecords()
                    client.send(b"\xff")
                else:
                    print("WARNING: Unknown command from client. Closing socket ...")
                    client.close()
                    exit(1)
                break
class ThreadedServer(object):
    """HTTP CONNECT proxy that relays TLS traffic, one thread per client.

    Connections whose CONNECT hostname contains the configured target
    domain are relayed through the IUPTIS delaying/recording logic
    (handleIUPTISStream); every other connection is forwarded
    transparently (handleStream).
    """
    def __init__(self, host, port, timewait, targetDomain):
        """Bind the listening socket.

        host / port  -- local address the proxy listens on.
        timewait     -- WAIT_COMPLETION value handed to IUPTISDelay.
        targetDomain -- hostname substring selecting IUPTIS handling.
        """
        self.host = host
        self.port = port
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock.bind((self.host, self.port))
        # Chunk size used for every recv() on client/server sockets.
        self.recvSize = 4096
        #self.targetAddr = b".cdninstagram.com"
        #self.targetAddr = b"pbs.twimg.com"
        # Target domain as bytes, matched as a substring of the CONNECT host.
        self.targetAddr = str.encode(targetDomain)
        self.isBusy = False
        self.timeWait = timewait
    def listen(self):
        """Accept clients forever, spawning one handler thread each."""
        self.sock.listen(40)
        # Proxy will accept TCP connections.
        while True:
            client, address = self.sock.accept()
            client.settimeout(120)
            threading.Thread(target = self.listenToClient,args = (client,address)).start()
    def handleStream(self,client,address,outSock):
        """Transparently relay bytes between client and outSock.

        Runs a select() loop with a zero timeout (busy polling) and
        returns True once either side disconnects.
        """
        clientData = b""
        outSockData = b""
        while 1:
            # Only poll for writability on sockets with pending bytes.
            writeSockets = []
            if (clientData):
                writeSockets.append(client)
            if (outSockData):
                writeSockets.append(outSock)
            readSockets = [client, outSock]
            readable, writeable, exceptional = select.select(readSockets, writeSockets, [],0)
            for w in writeable:
                if w is client:
                    if (clientData):
                        # NOTE(review): send() may write only part of the
                        # buffer; sendall() would be safer -- confirm.
                        client.send(clientData)
                        clientData = b""
                elif w is outSock:
                    if (outSockData):
                        outSock.send(outSockData)
                        outSockData = b""
            for r in readable:
                if r is client:
                    data = client.recv(self.recvSize)
                    if not data:
                        # Client closed: tear down the server side too.
                        print("Client disconnected.")
                        outSock.close()
                        return True
                    outSockData += data
                elif r is outSock:
                    data = outSock.recv(self.recvSize)
                    if not data:
                        print("Server disconnected.")
                        client.close()
                        return True
                    clientData += data
    def send200Connect(self,client):
        """Acknowledge the client's CONNECT request with a 200 response."""
        client.send(b"HTTP/1.1 200 Connection Established\r\nConnection: close\r\n\r\n")
    def listenToClient(self, client, address):
        """Per-connection entry point (runs in its own thread).

        Buffers bytes until a complete CONNECT header block is parsed,
        replies 200, opens the outgoing socket, then dispatches to either
        the IUPTIS stream handler (target domain) or the transparent relay.
        """
        response = b""
        while True:
            isConnected = False
            data = client.recv(self.recvSize)
            if data:
                response += data
                isConnected,outHostname,outPort = self.handleHTTPConnect(response)
                if (isConnected):
                    self.send200Connect(client)
                    break
            else:
                print("Disconnected before HTTP CONNECT.")
                client.close()
                return False
        # Is this request targeted to our address?
        if (self.targetAddr in outHostname):
            # Outgoing socket.
            sc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            # connect() resolves the bytes hostname via getaddrinfo.
            sc.connect((outHostname, int(outPort)))
            sc.setblocking(True)
            client.setblocking(True)
            self.isBusy = True
            print("Handling TARGET host: " + str(outHostname))
            val = self.handleIUPTISStream(client,address,sc)
            self.isBusy = False
            return val
        else:
            # Outgoing socket.
            sc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sc.connect((outHostname, int(outPort)))
            sc.setblocking(True)
            client.setblocking(True)
            print("Host: " + str(outHostname))
            return self.handleStream(client,address,sc)
    def handleHTTPConnect(self,strConn):
        """Try to parse the buffered client bytes as a proxy request.

        Returns (True, hostname, port) once a full header block
        ("\\r\\n\\r\\n") has been received, else (False, "", "").  The
        destination is taken from the "Host:" header rather than the
        request line; hostname/port are bytes when parsed from the
        header, but the default port is the str "80" (int() accepts both).
        """
        if (strConn.find(b"\r\n\r\n") == -1):
            return False, "", ""
        # Locate the Host header and an optional ":port" suffix.
        # NOTE(review): if "Host:" is absent, sIndex is -1 and the slices
        # below produce garbage -- assumes well-formed proxy requests.
        sIndex = strConn.find(b"\r\nHost: ")
        midIndex = strConn.find(b"\r\n",sIndex+8)
        eIndex = strConn.find(b":", sIndex+8,midIndex)
        e2Index = strConn.find(b"\r\n", eIndex+1)
        if (eIndex != -1):
            outHostname = strConn[sIndex+8:eIndex]
            outPort = strConn[eIndex+1:e2Index]
        else:
            outHostname = strConn[sIndex + 8:midIndex]
            outPort = "80"
        return True, outHostname, outPort
    def handleIUPTISStream(self,client,address,outSock):
        """Relay the target connection through the IUPTISDelay buffer.

        Same select()-based busy loop as handleStream, but every byte
        passes through an IUPTISDelay instance, which queues TLS records
        and decides when each direction may proceed.  Returns True once
        either side disconnects.
        """
        iupDel = IUPTISDelay(outSock.getsockname(),self.timeWait)
        while 1:
            # Run the core IUPTISDelay algorithm.
            while (iupDel.update()):
                pass
            writeSockets = []
            if (iupDel.hasDataForClient()):
                writeSockets.append(client)
            if (iupDel.hasDataForServer()):
                writeSockets.append(outSock)
            readSockets = [client, outSock]
            readable, writeable, exceptional = select.select(readSockets, writeSockets, [],0)
            for w in writeable:
                if w is client:
                    print("Sending data to client.")
                    client.send(iupDel.getDataForClient())
                elif w is outSock:
                    print("Sending data to server.")
                    outSock.send(iupDel.getDataForServer())
            for r in readable:
                if r is client:
                    data = client.recv(self.recvSize)
                    if not data:
                        print("Target client disconnected.")
                        self.isBusy = False
                        outSock.close()
                        return True
                    iupDel.sendToServer(data)
                elif r is outSock:
                    data = outSock.recv(self.recvSize)
                    if not data:
                        print("Target server disconnected.")
                        self.isBusy = False
                        client.close()
                        return True
                    iupDel.sendToClient(data)
class SERVER_STATUS(Enum):
    """State of the request/response cycle tracked by IUPTISDelay."""
    # A client application-data record (request) has been forwarded;
    # waiting for the server's response.
    REQUEST_ON_ROUTE = 1
    # A server application-data record longer than the threshold arrived;
    # the response is being streamed back to the client.
    SENDING_RESPONSE = 2
    # Ready to forward the next queued client application-data record.
    WAITING_FOR_REQUEST = 3
    # Initial state: no application-data record forwarded yet.
    WAITING_FOR_REQUEST_FIRST = 4
class IUPTISDelay:
def __init__(self,sockname,timeWait):
self.dstip = sockname[0]
self.srcport = int(sockname[1])
self.uniqueName = self.dstip + "_" + str(self.srcport)
self.clientData = b""
self.serverData = b""
self.serverTLSQueue = []
self.serverAllowedData = b""
self.clientAllowedData = b""
self.clientTLSQueue = []
#self.WAIT_COMPLETION = 0.5
self.WAIT_COMPLETION = timeWait
self.lastReceivedFromServer = time.time()
self.hasDataClient = False
self.hasDataServer = False
self.serverHasRequest = False
self.serverStatus = SERVER_STATUS.WAITING_FOR_REQUEST_FIRST
def hasDataForClient(self):
return (len(self.clientAllowedData) > 0)
def hasDataForServer(self):
return (len(self.serverAllowedData) > 0)
def sendToClient(self,data):
#print("Received data from server.")
self.clientData += data
def sendToServer(self,data):
#print("Received data from client.")
self.serverData += data
def getDataForClient(self):
if (self.hasDataForClient()):
backupData = self.clientAllowedData
self.clientAllowedData = b""
return backupData
def getDataForServer(self):
if (self.hasDataForServer()):
backupData = self.serverAllowedData
self.serverAllowedData = b""
return backupData
def update(self):
globChanged = False
# Handle data from client to server
while (len(self.serverData) > 5):
#Skip anything else then Application Data Records
if (self.serverData[0:3] == b"\x16\x03\x01" or self.serverData[0:3] == b"\x16\x03\x03" or
self.serverData[0:3] == b"\x14\x03\x03" or self.serverData[0:3] == b"\x15\x03\x03"):
tlsLen = struct.unpack(">H", self.serverData[3:5])[0]
# Make sure we have enough data to queue the complete TLS Record.
if (len(self.serverData) >= tlsLen + 5):
self.serverTLSQueue.append([tlsLen, self.serverData[:tlsLen + 5], False])
self.serverData = self.serverData[tlsLen + 5:]
print("Queuing non-AppData for server.")
globChanged = True
else:
break
# Extract Application Data Records
elif (self.serverData[0:3] == b"\x17\x03\x03"):
tlsLen = struct.unpack(">H", self.serverData[3:5])[0]
# Make sure we have enough data to queue the complete TLS Record.
if (len(self.serverData) >= tlsLen + 5):
self.serverTLSQueue.append([tlsLen,self.serverData[:tlsLen+5], True])
self.serverData = self.serverData[tlsLen + 5:]
print("Queuing AppData for server.")
globChanged = True
else:
break
else:
print("Error: Unknown TLS data from client :(. First 3 bytes: " + repr(self.serverData[0:3]))
exit(1)
# Handle data from server to client.
while (len(self.clientData) > 5):
# Skip anything else then Application Data Records
if (self.clientData[0:3] == b"\x16\x03\x01" or self.clientData[0:3] == b"\x16\x03\x03" or
self.clientData[0:3] == b"\x14\x03\x03" or self.clientData[0:3] == b"\x15\x03\x03"):
tlsLen = struct.unpack(">H", self.clientData[3:5])[0]
# Make sure we have enough data to queue the complete TLS Record.
if (len(self.clientData) >= tlsLen + 5):
self.clientTLSQueue.append([tlsLen, self.clientData[:tlsLen + 5], False])
self.clientData = self.clientData[tlsLen + 5:]
print("Queuing non-AppData for client.")
globChanged = True
else:
break
# Extract Application Data Records
elif (self.clientData[0:3] == b"\x17\x03\x03"):
tlsLen = struct.unpack(">H", self.clientData[3:5])[0]
# Make sure we have enough data to queue the complete TLS Record.
if (len(self.clientData) >= tlsLen+5):
self.clientTLSQueue.append([tlsLen,self.clientData[:tlsLen+5], True])
self.clientData = self.clientData[tlsLen + 5:]
print("Queuing AppData for client.")
globChanged = True
else:
break
else:
print("Error: Unknown TLS data from server :(. First 3 bytes: " + repr(self.clientData[0:3]))
exit(1)
hasChanged = True
while (hasChanged):
hasChanged = False
if (len(self.clientTLSQueue) > 0):
hasChanged = True
tlsData = self.clientTLSQueue[0]
tlsLen = tlsData[0]
tcpData = tlsData[1]
isAppData = tlsData[2]
self.clientAllowedData += tcpData
if (isAppData):
addTLSRecord(self.uniqueName, tlsLen, -1, time.time())
# See small TLS records as non-HTTP response data.
if (tlsLen > 160 and isAppData):
if (self.serverStatus == SERVER_STATUS.REQUEST_ON_ROUTE):
self.serverStatus = SERVER_STATUS.SENDING_RESPONSE
self.lastReceivedFromServer = time.time()
#print("Pushed data for client.")
del self.clientTLSQueue[0]
if (hasChanged):
globChanged = True
# Do we want to pass data from client to server?
hasChanged = True
while (hasChanged):
hasChanged = False
#print("SERVER_STATUS = " + repr(self.serverStatus))
if (len(self.serverTLSQueue) > 0):
# If we have a TLS Record that does not contain Application Data, then pass it right away.
if (self.serverTLSQueue[0][2] == False):
tlsData = self.serverTLSQueue[0]
tcpData = tlsData[1]
self.serverAllowedData += tcpData
del self.serverTLSQueue[0]
hasChanged = True
elif (self.serverStatus == SERVER_STATUS.WAITING_FOR_REQUEST_FIRST):
tlsData = self.serverTLSQueue[0]
tcpData = tlsData[1]
self.serverAllowedData = tcpData
self.serverStatus = SERVER_STATUS.WAITING_FOR_REQUEST
self.lastReceivedFromServer = time.time() #MAYBE?
del self.serverTLSQueue[0]
hasChanged = True
elif (self.serverStatus == SERVER_STATUS.WAITING_FOR_REQUEST):
tlsData = self.serverTLSQueue[0]
tcpData = tlsData[1]
self.serverAllowedData += tcpData
if (tlsData[2]):
addTLSRecord(self.uniqueName, tlsData[0], 1, time.time())
self.serverStatus = SERVER_STATUS.REQUEST_ON_ROUTE
del self.serverTLSQueue[0]
self.lastReceivedFromServer = time.time()
hasChanged = True
elif (self.serverStatus == SERVER_STATUS.REQUEST_ON_ROUTE):
if (time.time() - self.lastReceivedFromServer >= self.WAIT_COMPLETION):
# Too long that we got something BIG from the server. Pass another TLS record from client to server.
tlsData = self.serverTLSQueue[0]
tcpData = tlsData[1]
self.serverAllowedData += tcpData
if (tlsData[2]):
addTLSRecord(self.uniqueName, tlsData[0], 1, time.time())
if (len(self.serverTLSQueue[0][1]) >= 40):
self.lastReceivedFromServer = time.time()
#print("Passing another TLS Record from client to server RR.")
del self.serverTLSQueue[0]
hasChanged = True
elif (self.serverStatus == SERVER_STATUS.SENDING_RESPONSE):
if (time.time() - self.lastReceivedFromServer >= self.WAIT_COMPLETION): # or len(self.serverTLSQueue[0][1]) < 40):
# Too long that we got something BIG from the server. Pass another TLS record from client to server.
tlsData = self.serverTLSQueue[0]
tcpData = tlsData[1]
if (tlsData[2]):
addTLSRecord(self.uniqueName, tlsData[0], 1, time.time())
self.serverAllowedData += tcpData
if (len(self.serverTLSQueue[0][1]) >= 40):
self.lastReceivedFromServer = time.time()
del self.serverTLSQueue[0]
self.serverStatus = SERVER_STATUS.REQUEST_ON_ROUTE
#print("Passing another TLS Record from client to server SR.")
hasChanged = True
if (hasChanged):
#print("Pushed data for server.")
globChanged = True
return globChanged
if __name__ == "__main__":
    # Entry point: requires <time_waiting> (seconds, parsed as float) and <domain_name>.
    if (len(sys.argv) < 3):
        print("usage: python3 tcpproxy.py <time_waiting in seconds> <domain_name>")
        exit(1)
    #Running
    # Background thread runs the communicate() loop defined earlier in this file.
    print("Running communication thread ... ")
    threading.Thread(target=communicate).start()
    # Main thread accepts incoming connections on port 81.
    print("Listening ...")
    ThreadedServer('', 81,float(sys.argv[1]),sys.argv[2]).listen()
| [
"mariano@mariano.in.edm.uhasselt.be"
] | mariano@mariano.in.edm.uhasselt.be |
7614c023c082ba42450ea0d171a4fbedfcda4a48 | e6076850afb8e8104eee3ee622df423f6268ff41 | /exploit/exp.py | 670faeba986496e717dce8eeb73f8366f8a37d2e | [] | no_license | GouBa-1/PDF_SHOW_MD5 | df877f4951ef115dea38c6bbf9a96453514d1e3c | 0f75691a9a08cb25ec3ff9109e8498842b0aa9e1 | refs/heads/main | 2023-01-28T15:00:56.287038 | 2020-12-11T11:02:10 | 2020-12-11T11:02:10 | 319,991,393 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,249 | py | import subprocess
import struct
import psutil
import time
import sys
import os
# Collision prefix chunk files, in order: ./hex/f0.bin .. ./hex/f31.bin
Prefix = ["./hex/f{}.bin".format(i) for i in range(32)]
# Picture chunk files indexed by hex digit: ./hex/p0.bin .. ./hex/pf.bin
Picture = ["./hex/p{:x}.bin".format(i) for i in range(16)]
b_name = ""
def get_last(data):
    """Return the index of the 0xff byte of the last 0xff 0xfe marker in *data*.

    Scans backwards from the end; returns None when no marker exists.
    """
    for pos in range(len(data) - 1, 0, -1):
        if data[pos] == 0xfe and data[pos - 1] == 0xff:
            return pos - 1
    return None
def unicoll_one(cnt):
    """Run one chained round of MD5 collision construction for prefix index *cnt*.

    Iterates C over the 16 picture variants: aligns each collision block on
    0x80-byte boundaries, shells out to ../scripts/poc_no.sh to compute the
    collision, then writes per-variant outputs to <cnt>_<C>.bin files.
    When the collision script exceeds its timeout the round is rolled back
    (files restored from the *_c.bin copies) and retried.
    """
    # prefix_stream: the prefix file opened in binary mode
    # prefix_data: every byte read from prefix_stream
    # prefix_len: the length of prefix_data
    # F flags whether the previous attempt timed out: 0 = no, 1 = timed out.
    F=0
    if sys.argv[2]:
        C=int(sys.argv[2])
    else:
        C=0
    while C<=15:
        Cnt=str(C)
        b_name=str(cnt)+"_x.bin"
        bb_name=str(cnt)+"_x_c.bin"
        bbb_name=str(cnt)+'_x_last.bin'
        # If the previous attempt timed out, drop the files it generated
        # and restore every per-variant file from its *_c.bin backup.
        if F==1:
            F=0
            open(b_name,"wb").write(open(bbb_name,'rb').read())
            os.system("rm -rf "+str(cnt)+'_'+str(C+1)+".bin")
            for i in range(C):
                open(str(cnt)+"_"+str(i)+".bin","wb").write(open(str(cnt)+"_"+str(i)+"_c.bin","rb").read())
            last_data=open(bb_name,'rb').read()
            open(b_name,'wb').write(last_data)
        # First variant of a round: seed b_name from the prefix chunk (and the
        # previous index's _x.bin when cnt > 0).
        if Cnt=='0':
            os.system("cat "+b_name)
            print(b_name)
            os.system("cat "+bb_name)
            print(bb_name)
            prefix=Prefix[cnt]
            if cnt==0:
                b=open(prefix,"rb").read()
            if cnt!=0:
                bb=str(cnt-1)+"_x.bin"
                b=open(bb,"rb").read()
                b+=open(prefix,"rb").read()
            open(b_name,"wb").write(b)
            open(bb_name,"wb").write(b)
        for i in range (1):
            if C>0:
                open(bbb_name,"wb").write(open(bb_name,'rb').read())
            prefix=b_name
            picture=Picture[int(Cnt)]
            prefix_stream=open(prefix,"rb")
            prefix_data=prefix_stream.read()
            open(bb_name,"wb").write(prefix_data)
            prefix_len=len(prefix_data)
            prefix_stream.close()
            # Locate the offset of the last 0xFFFE marker in the data so far.
            last_addr=get_last(prefix_data)
            #print("last fffe is in "+str(last_addr))
            # Offsets of the two length bytes that follow the marker, and the
            # big-endian length value they encode.
            len1_addr=last_addr+2
            len2_addr=last_addr+3
            c_len=prefix_data[len1_addr]*0x100+prefix_data[len2_addr]
            # Work out where the next 0xFFFE should appear; unless it lands at
            # an address of the form x*0x80+7 it does not fit the next
            # collision construction.
            # Handling: pad with \x00 straight up to the next 0xFFFE and
            # compute the distance from len1_addr to the next usable position.
            #print(hex(len1_addr))
            #print(hex(c_len))
            next_chunk=len1_addr+c_len# position of the next chunk
            if (next_chunk-7)%0x80!=0:
                prefix_data+=(b'\x00'*(-(len(prefix_data)-len1_addr)+c_len))
                prefix_data+=(b'\xff\xfe')# jump straight to the next chunk and start laying it out
                # Length of the comment block for the next construction.
                # Use next_chunk to find the current chunk's offset within the 0x80 window.
                #next_chunk%80
                #print(hex(next_chunk))
                #print(hex(next_chunk%0x80))
                tmp=0x80-(next_chunk%0x80)-2
                #print(hex(tmp))
                pad_len=bytes([tmp])
                #print(pad_len)
                prefix_data+=(b'\x00'+pad_len)
                prefix_data+=b'\x00'*(tmp-2)
            # If the data length is not a multiple of 0x80, pad it out.
            prefix_len=len(prefix_data)
            if prefix_len%0x80!=0:
                tmp_len=prefix_len%0x80
                #print(hex(int(tmp_len)))
                tmp_len=0x80-tmp_len
                tmp_len=int(tmp_len)
                #print(hex(tmp_len))
                # NOTE(review): this loop variable shadows the function
                # argument 'cnt' and clobbers it for the rest of the round
                # (e.g. the name2 built below) — verify this is intended.
                for cnt in range(tmp_len):
                    prefix_data+=b'\x00'
            # Append the collision-block prefix marker.
            pre=b"\x00"*7+b'\xff'+b'\xfe'+b'\x00'+b'\x80'+b'\x00'*6
            prefix_data+=pre
            prefix_stream=open(b_name,"wb")
            for c in prefix_data:
                s=struct.pack("B",c)
                prefix_stream.write(s)
            prefix_stream.close()
            shell_1="../scripts/poc_no.sh "+b_name
            #print(shell_1)
            #os.system(shell_1)
            parent=subprocess.Popen(shell_1,shell=True)
            for _ in range(300): # wait up to ~300 seconds for the collision script
                if parent.poll() is not None: # process just ended
                    break
                time.sleep(1)
            else:
                # the for loop ended without break: timeout
                parent = psutil.Process(parent.pid)
                for child in parent.children(recursive=True): # or parent.children() for recursive=False
                    child.kill()
                parent.kill()
                # On timeout set F to 1, step C back so the round is retried,
                # and bail out of this inner loop.
                print("TIME OUT")
                F=1
                if C>0:
                    C-=2
                else:
                    C-=1
                break
            print("WxWWxxWWWxxxWWWWxxxxWWWWWW*+=Finish=+*WWWWWxxxxWWWWxxxWWWxxWWxW")
            # One collision attempt finished.
            # collision1 is the "small" variant, collision2 the "large" one.
            collision1=open("./collision1.bin","rb")
            collision2=open("./collision2.bin","rb")
            name1=b_name
            name2=str(cnt)+"_"+Cnt+".bin"
            os.system("cat "+name2)
            c1_data=collision1.read()
            c2_data=collision2.read()
            collision1.close()
            collision2.close()
            uncertain=open(name1,"wb")
            certain=open(name2,"wb")
            #print(name1)
            #print(name2)
            c_addr=len(c1_data)
            c_addr=c_addr-0x77
            # First insert the 0xFFFE used for the 0x80 alignment.
            payload=b'\x00'*9
            payload+=b'\xff\xfe\x02\x00'
            payload+=b'\x00'*12
            payload+=b'\x00'*16*0xf
            pict=open(picture,'rb')
            pict_data=pict.read()
            payload+=pict_data
            payload+=b'\xff\xfe'
            # Compute the distance to the end of the 0x3300-byte window.
            pp_tmp=c1_data+payload
            pp_len=len(pp_tmp)%0x3300# offset where the length field lives
            pp_len=0x3300-pp_len
            p1=pp_len/0x100
            p2=pp_len%0x100
            print("p1: "+hex(int(p1)))
            print("p2: "+hex(int(p2)))
            p1=bytes([int(p1)])
            p2=bytes([int(p2)])
            payload+=(p1+p2)
            begin=0xb
            payload_len=len(payload)
            p_len=payload_len-begin
            p_len=0x200-p_len
            payload+=p_len*b'\x00'
            payload+=b'\xff\xfe\x00\x10'
            c1_data+=payload
            c2_data+=payload
            uncertain.write(c1_data)
            certain.write(c2_data)
            uncertain.close()
            certain.close()
            # When a full pass completes, swap the new collision block into
            # every previously generated per-variant file.
            if C>0:
                data=get_padding(c1_data)
                for old in range (C):
                    old_name=str(cnt)+"_"+str(old)+".bin"
                    old_name_c=str(cnt)+"_"+str(old)+"_c.bin"
                    print(old_name+" will change!")
                    old_data=open(old_name,"rb").read()
                    print(str(old))
                    print(str(C-1))
                    if old==(C-1):
                        command1="cat "+old_name_c
                        os.system(command1)
                        open(old_name_c,"wb").write(old_data)
                    old_data+=data
                    open(old_name,"wb").write(old_data)
        # NOTE(review): the indentation of the two lines below looks damaged
        # (a bare 'break' at function-body level is a SyntaxError) — verify
        # against the original script in version control.
        C=C+1
    break
def get_padding(data):
    """Return the tail of *data* starting just after the last 0xff 0xfe 0x00 0x10 marker."""
    pos = len(data) - 1
    print("aa==========================================================================================================================================================aa")
    # Walk backwards until the four-byte marker ends at index pos - 1.
    while not (data[pos - 1] == 0x10 and data[pos - 2] == 0x00
               and data[pos - 3] == 0xfe and data[pos - 4] == 0xff):
        pos -= 1
    print(hex(pos))
    return data[pos:]
# Script entry: argv[1] selects the prefix index passed to unicoll_one.
# NOTE(review): this raises IndexError (not a clean default) when no
# argument is supplied — confirm the intended CLI contract.
if sys.argv[1]:
    cnt=int(sys.argv[1])
else:
    cnt=0
unicoll_one(cnt)
# Pad the master _x.bin (and all 16 per-variant files) out to the next
# 0x3300 boundary, then close the embedded JPEG/PDF stream.
x_name=str(cnt)+'_x.bin'
x_data=open(x_name,'rb').read()
x_len=len(x_data)
x_len%=0x3300
x_padding_len=0x3300-x_len-3
pre=int(x_padding_len/0x100)
tai=int(x_padding_len%0x100)
padding=(bytes([pre])+bytes([tai]))
padding+=b'\x00'*(x_padding_len-1)
# JPEG end-of-image marker (FF D9) followed by "\nendstream\nendobj\n",
# i.e. the PDF stream/object trailer.
padding+=b'\xff\xd9\x0a\x65\x6e\x64\x73\x74\x72\x65\x61\x6d\x0a\x65\x6e\x64\x6f\x62\x6a\x0a'
# Replace the last two bytes of every generated file with the padding block.
for i in range(16):
    old_name=str(cnt)+'_'+str(i)+'.bin'
    print(old_name)
    old_data=open(old_name,"rb").read()
    old_data=old_data[:-2]
    old_data+=padding
    open(old_name,"wb").write(old_data)
old_name=str(cnt)+'_x.bin'
print(old_name)
old_data=open(old_name,"rb").read()
old_data=old_data[:-2]
old_data+=padding
open(old_name,"wb").write(old_data)
"noreply@github.com"
] | noreply@github.com |
62b8d8fe84aecc31a1b43a18a408a4002daea6e4 | 77a6884ca829b56f5a39d66223dbcb6786838469 | /Object+Detection+Realtime (1).py | 142abf505912c57e8e1bdf54f664cfa06ce10a29 | [] | no_license | nickname0153/workshop-by-hvzn | 1b0a521c479951c59b734072016439612505bbd6 | 001dccb8bd382c6faf7d0f1f72932c9b6ca86419 | refs/heads/master | 2020-08-26T19:36:56.995018 | 2018-11-16T07:46:53 | 2018-11-16T07:46:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,018 | py |
# coding: utf-8
# In[1]:
import numpy as np
import os
import six.moves.urllib as urllib
import sys
import tarfile
import tensorflow as tf
import zipfile
# In[2]:
from collections import defaultdict
from io import StringIO
from matplotlib import pyplot as plt
from PIL import Image
# In[16]:
import cv2
# Webcam capture; device index 0 is the default camera.
cap = cv2.VideoCapture(0)
# In[5]:
# This is needed since the notebook is stored in the object_detection folder.
sys.path.append("..")
# In[7]:
# Helpers from the TensorFlow object_detection repo, resolved via the
# sys.path entry added above.
from utils import label_map_util
from utils import visualization_utils as vis_util
# In[8]:
# What model to download.
MODEL_NAME = 'ssd_mobilenet_v1_coco_11_06_2017'
MODEL_FILE = MODEL_NAME + '.tar.gz'
DOWNLOAD_BASE = 'http://download.tensorflow.org/models/object_detection/'
# Path to frozen detection graph. This is the actual model that is used for the object detection.
PATH_TO_CKPT = MODEL_NAME + '/frozen_inference_graph.pb'
# List of the strings that is used to add correct label for each box.
PATH_TO_LABELS = os.path.join('data', 'mscoco_label_map.pbtxt')
NUM_CLASSES = 90
# In[7]:
# Download the model tarball and extract only the frozen inference graph
# into the current working directory.
opener = urllib.request.URLopener()
opener.retrieve(DOWNLOAD_BASE + MODEL_FILE, MODEL_FILE)
tar_file = tarfile.open(MODEL_FILE)
for file in tar_file.getmembers():
  file_name = os.path.basename(file.name)
  if 'frozen_inference_graph.pb' in file_name:
    tar_file.extract(file, os.getcwd())
# In[9]:
# Load the serialized frozen graph into a fresh tf.Graph (TF1-style API).
detection_graph = tf.Graph()
with detection_graph.as_default():
  od_graph_def = tf.GraphDef()
  with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
    serialized_graph = fid.read()
    od_graph_def.ParseFromString(serialized_graph)
    tf.import_graph_def(od_graph_def, name='')
# In[10]:
# Map numeric class ids to human-readable category names for display.
label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
# In[11]:
def load_image_into_numpy_array(image):
    """Convert a PIL image into an (height, width, 3) uint8 numpy array."""
    im_width, im_height = image.size
    flat_pixels = np.array(image.getdata())
    return flat_pixels.reshape((im_height, im_width, 3)).astype(np.uint8)
# In[12]:
# If you want to test the code with your images, just add path to the images to the TEST_IMAGE_PATHS.
# (These constants are not referenced by the realtime webcam loop below.)
PATH_TO_TEST_IMAGES_DIR = 'test_images'
TEST_IMAGE_PATHS = [ os.path.join(PATH_TO_TEST_IMAGES_DIR, 'image{}.jpg'.format(i)) for i in range(1, 3) ]
# Size, in inches, of the output images.
IMAGE_SIZE = (12, 8)
# In[ ]:
# Realtime detection loop: grab webcam frames, run them through the frozen
# graph, draw the detections, and display until 'q' is pressed.
with detection_graph.as_default():
  with tf.Session(graph=detection_graph) as sess:
    while True:
      # NOTE(review): the 'ret' success flag from the camera is never
      # checked — a failed grab would feed None into the model.
      ret, image_np = cap.read()
      # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
      image_np_expanded = np.expand_dims(image_np, axis=0)
      image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
      # Each box represents a part of the image where a particular object was detected.
      boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
      # Each score represent how level of confidence for each of the objects.
      # Score is shown on the result image, together with the class label.
      scores = detection_graph.get_tensor_by_name('detection_scores:0')
      classes = detection_graph.get_tensor_by_name('detection_classes:0')
      num_detections = detection_graph.get_tensor_by_name('num_detections:0')
      # Actual detection.
      (boxes, scores, classes, num_detections) = sess.run(
          [boxes, scores, classes, num_detections],
          feed_dict={image_tensor: image_np_expanded})
      # Visualization of the results of a detection.
      vis_util.visualize_boxes_and_labels_on_image_array(
          image_np,
          np.squeeze(boxes),
          np.squeeze(classes).astype(np.int32),
          np.squeeze(scores),
          category_index,
          use_normalized_coordinates=True,
          line_thickness=8)
      cv2.imshow('object detection', cv2.resize(image_np, (1000,800)))
      # Exit on 'q'.
      if cv2.waitKey(25) & 0xFF == ord('q'):
        cv2.destroyAllWindows()
        break
| [
"32302970+afif2100@users.noreply.github.com"
] | 32302970+afif2100@users.noreply.github.com |
1f6f0c2490ff3734c1773283e4029d90c7b09ae1 | d7c44522f934c7ab3ad9766cc01440fe9300c001 | /cogs/meta.py | 8bc90892d1752cfb76d67dae4e78f4b1d3ca152d | [
"MIT"
] | permissive | cretoria/Rockbot | 4f486c22d6bda71e3f6cff75db554681d306f030 | b7f184013458bf505392be62203effbf2d35ac7a | refs/heads/master | 2021-06-29T15:21:54.099844 | 2020-12-05T11:15:23 | 2020-12-05T11:15:23 | 141,432,659 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,706 | py | import discord
import asyncio
import json
import urllib
import aiohttp
import random
import collections
import re
from datetime import datetime as dt
from itertools import islice
from discord.ext import commands
from PIL import Image, ImageDraw, ImageFont
# Endpoint serving the full kingdoms dump (JSON) for the current round.
# NOTE(review): the access key is hard-coded in the URL; consider moving
# it to configuration.
UTOPIA_URL = "http://utopia-game.com/wol/game/kingdoms_dump/?key=l1FdkNfdklAs"
# Timestamp format of the dump's first element (parsed in Meta.get_data).
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
# On-disk kingdom database, keyed by province name plus a "misc" entry.
shameless_json = '/home/pi/cretobot/shameless77.json'
# Provinces expected to cast support spells.
gp_supporters = ['calamity',
                 'marrek',
                 'cheese',
                 'dnomder',
                 'selwonk'
                ]
# Provinces to check for Greater Protection coverage (see the !support command).
gp_needers = ['Beer Bear',
              'Bully Bear',
              'Cunning Linguist Bear',
              'Discrete Bear',
              'Dont Care Bear',
              'Dumpster Fire Bear',
              'Faceless Bear',
              'Gentle Assassin Bear',
              'Grumpy Bear',
              'Illuminati Bear',
              'Jeer Bear',
              'Karen Bear',
              'Nair Bear',
              'Nice Personality Bear',
              'No Chill Bear',
              'Only Fans Bear',
              'Sadistic Bear',
              'Totally Shameless Bear',
              'Troll Bear'
             ]
class Meta(commands.Cog):
    def __init__(self,bot):
        self.bot = bot
        # Cached kingdoms dump; stays None until the first fetch (see get_data).
        self.saved_data = None
        # self.bot.session = aiohttp.ClientSession(loop=self.bot.loop)
async def get_data(self):
# If saved data is None we haven't fetched anything, thanks to short-
# circuiting we can check the index in the or clause safely
# If 15 minutes has passed since the last data fetched, we need to update
# to make sure it's fresh
if self.saved_data is None or (dt.utcnow() - dt.strptime(self.saved_data[0],
DATETIME_FORMAT)).total_seconds() // 60 > 15:
await self.download_data()
return self.saved_data
    async def download_data(self):
        """Fetch a fresh kingdoms dump from UTOPIA_URL and cache it on self.saved_data."""
        async with self.bot.session.get(UTOPIA_URL) as r:
            data = await r.json()
        self.saved_data = data
    # Command to add Dunce role to person, including increasing counter in KD json
    @commands.command(aliases=['dunce','givedunce'])
    @commands.has_role('leaders')
    async def _dunce(self, ctx, duncee, *, dunce_reason: str=None):
        """Give the mentioned user the Dunce role for 12 hours and announce it.

        *duncee* is expected to be a user mention; the matching province's
        dunce counter in the kingdom JSON is incremented and an embed is
        posted both in-channel and in the #dunce-corner channel.
        """
        with open(shameless_json, 'r') as f:
            data = json.load(f)
        # Strip mention markup to recover the raw user id.
        member = ctx.guild.get_member(int(duncee.strip('<@!>')))
        dunce_role = discord.utils.get(ctx.guild.roles, id=739615380787429516)
        await member.add_roles(dunce_role, reason=dunce_reason)
        ### Update individual member dunce count
        for p_name, p_info in data.items():
            if p_name != "misc" and p_info["discord.id"] == member.id:
                p_info['dunce_count'] += 1
                dunce_count=p_info['dunce_count']
        # NOTE(review): if no province is linked to this member, dunce_count
        # is never bound and the embed text below raises NameError — confirm.
        ### Send embed containing dunce post to #dunce-corner
        dunce_channel = self.bot.get_channel(759132744252391505)
        embed = discord.Embed(title='SOMEONE HAS BEEN DUNCED!', color=0x96b403)
        embed.set_thumbnail(url='https://cdn.discordapp.com/attachments/404868940683149332/743549927799390228/rock-dunce.jpg')
        embed_text = ('Here\'s why: __{}__ \n'
                'Number of times {} has been dunced this age: **{}**'.format(
                dunce_reason, member.name, dunce_count))
        embed.add_field(name= '{} is a dunce for 12 hours!'.format(member.name), value = embed_text)
        # value = 'Here\'s why: {}\n'
        # 'Number of times {} has been a dunce this age: **{}**'.format(
        # dunce_reason, member.name, p_info['dunce_count']))
        await ctx.send(embed=embed)
        await dunce_channel.send(embed=embed)
        with open(shameless_json, 'w') as f:
            json.dump(data, f, indent=4, sort_keys=True)
        # Hold the coroutine open for 12 hours, then remove the role.
        await asyncio.sleep(43200)
        await member.remove_roles(dunce_role)
    # Notify if person doesn't have necessary role
    @_dunce.error
    async def _dunce_error(self, ctx, error):
        """Error handler for _dunce: replies when the 'leaders' role check fails."""
        if isinstance(error, commands.CheckFailure):
            await ctx.send('Go sit your candy-ass down! You\'re not the Monarch or a Steward...')
    # Command to calculate books needed for desired science effect
    @commands.command(aliases=['books','booksneeded','books_needed'])
    async def _books_needed(self, ctx, chosen_cat: str=None, chosen_inc: str=None):
        """Compute how many science books are needed for a target effect.

        Two usage modes: pass both arguments (category number 1-16 and the
        desired percentage) for a one-shot answer, or pass nothing for an
        interactive prompt flow. A Human science bonus from the kingdom
        JSON is applied automatically when the caller's province is linked.
        Formula: effect = books^(1/2.125) * category multiplier * race mod.
        """
        author = ctx.author
        # Cal doesn't get to use this nifty tool
        if author.id == 217085842307547136:
            return await ctx.send('Sorry Cal, this tool is only available for those who'
                            ' believe in the benfits of science. Please fuck off'
                            ' and try again later. Have a nice day.')
        # Define the science category multipliers
        # These are as of Age 87 from the Wiki
        cat_list = collections.OrderedDict([
            ('alchemy',('Income',0.0724)),
            ('tools',('Building Effectiveness',0.0524)),
            ('housing',('Population Limit',0.0262)),
            ('production',('Food & Rune Production',0.2492)),
            ('bookkeeping',('Wage Reduction',0.068)),
            ('artisan',('Construction Time & Cost',0.0478)),
            ('strategy',('Defensive Military Efficiency',0.0367)),
            ('siege',('Battle Gains',0.0262)),
            ('tactics',('Offensive Military Efficiency',0.0418)),
            ('valor',('Military Train Time & Dragon Slaying',0.0582)),
            ('heroism',('Draft Speed & Cost',0.0418)),
            ('crime',('Thievery Effectiveness',0.1557)),
            ('channeling',('Magic Effectiveness',0.1875)),
            ('shielding',('Reduced Damage from Enemy Ops',0.0314)),
            ('cunning',('Increased Ops Damage',0.0472)),
            ('invocation',('Ritual Rune Cost Reduction',0.0622))])
        # Shortcut mode needs both arguments.
        if chosen_cat != None and chosen_inc == None:
            return await ctx.send('If you\'re trying to use the shortcut method, the syntax is:'
                '```!books [number for science category] [number for increase percent]```')
        elif chosen_cat != None and chosen_inc != None:
            #Determine if the race mod should be changed by checking user against Shameless json data
            with open(shameless_json, 'r+') as f:
                data = json.load(f)
            # NOTE(review): if the caller's province is not linked in the
            # JSON, race_mod/race stay unbound here and the calculation
            # below raises NameError — confirm intended.
            for p_name, p_info in data.items():
                if p_name != 'misc' and p_info['discord.id'] == author.id:
                    if p_info['race'] == 'Human':
                        race_mod = 1.1
                        race = 'Human'
                    # elif p_info['race'] == 'Undead':
                    # race_mod = 0.5
                    # race = 'Undead'
                    else:
                        race_mod = 1
                        race = None
            try:
                val = int(chosen_cat)
            except ValueError:
                await ctx.send('```Science category needs to be a whole number.\n'
                    'Please start over.```')
            if not 1 <= int(chosen_cat) <= 16:
                return await ctx.send('```Pick 1-16.\nPlease start over.```')
            try:
                chosen_inc = float(chosen_inc)
            except ValueError:
                await ctx.send('```Percentage increase needs to be a number, e.g. 5, 10.8, 20.\n'
                    'Please start over.```')
            # Invert the science formula: books = (effect / multiplier)^2.125.
            for i,(cat,(desc,mult)) in enumerate(cat_list.items(),start=1):
                if i==int(chosen_cat):
                    needed = round((float(chosen_inc)/(mult*race_mod))**2.125)
                    return await ctx.send('{:,} books needed in {} for a desired effect of {}%.'.format(needed,cat,chosen_inc))
            if race != None:
                await ctx.send('*Note this includes your {} science modifier.*'.format(race))
            else:
                pass
        # Interactive mode: first list the categories for the user to choose from.
        await ctx.send('\n'.join('{} - {} ({})'.format(i,x.capitalize(),y)
                for i, (x,(y,z)) in enumerate(cat_list.items(),start=1)))
        # Define other variables of the science formula
        race_mod = 1
        multiplier = 1
        # Only accept replies from the user who invoked the command.
        def check(m):
            return m.author == author
        #Determine if the race mod should be changed by checking user against Shameless json data
        with open(shameless_json, 'r+') as f:
            data = json.load(f)
        for p_name, p_info in data.items():
            if p_name != 'misc' and p_info['discord.id'] == author.id:
                if p_info['race'] == 'Human':
                    race_mod = 1.1
                    race = 'Human'
                # elif p_info['race'] == 'Undead':
                # race_mod = 0.5
                # race = 'Undead'
                else:
                    race_mod = 1
                    race = None
        #Get the science category to calculate books for
        await ctx.send('Which science category would you like me to calculate the needed books for?\n'
            '*(Enter the number corresponding to your desired choice (1-16).)*')
        try:
            sci_cat = await self.bot.wait_for('message', timeout=30.0, check=check)
        except asyncio.TimeoutError:
            await ctx.send('```Too slow choosing jabroni, try !books again when'
                ' you\'ve made up your mind!```')
        else:
            chosen_cat = sci_cat.content
            print(chosen_cat)
            try:
                val = int(chosen_cat)
            except ValueError:
                await ctx.send('```Needs to be a number.```')
            if not 1 <= int(chosen_cat) <= 16:
                return await ctx.send('```Pick 1-16.```')
            await ctx.send("What is your target effect? *i.e. 15% increase, then enter 15*")
            try:
                sci_inc = await self.bot.wait_for('message', timeout=30.0, check=check)
            except asyncio.TimeoutError:
                await ctx.send('```too slow```')
            else:
                chosen_inc = sci_inc.content
                try:
                    chosen_inc = float(chosen_inc)
                except ValueError:
                    await ctx.send('```Needs to be a number.```')
                for i,(cat,(desc,mult)) in enumerate(cat_list.items(),start=1):
                    if i==int(chosen_cat):
                        needed = round((chosen_inc/(mult*race_mod))**2.125)
                        await ctx.send('{:,} books needed in {} for a desired effect of {}%.'.format(needed,cat,chosen_inc))
                if race != None:
                    await ctx.send('*Note this includes your {} science modifier.*'.format(race))
    ### Science Bonus = ( # of Books in Type )^(1/2.125) * Science Multiplier * Race Mod
    @commands.command()
    async def roster(self, ctx):
        """Post a link to the current kingdom roster image."""
        await ctx.send('https://cdn.discordapp.com/attachments/435902101810577458/757668183044915290/image0.jpg')
    # (A large commented-out interactive roster picker from a previous age
    # was removed here as dead code; see version control history if needed.)
# Command to set our current KD
@commands.command()
@commands.has_any_role('admin')
async def setkd(self, ctx, our_kd):
fresh_data = await self.get_data()
for d in fresh_data[1:-1]:
if d.get("loc") == our_kd:
data = dict([x["name"], {"nw": x["nw"], "acres": x["land"],
"race": x["race"], "honor": x["honor"], "discord.id":
0, "discord.name": "", "bounces": 0, "points": 0, "dunce_count": 0,"whois": "", "last_bounce": ""}]
for x in d.get("provinces"))
data["misc"] = {}
data["misc"]["our_KD"] = our_kd
with open ("/home/pi/cretobot/shameless77.json", "w+") as f:
json.dump(data, f, indent=4, sort_keys=True)
await ctx.send('Our KD set to ({}) and all province info populated.'
.format(our_kd))
    # Notify if person doesn't have necessary role
    @setkd.error
    async def setkd_error(self, ctx, error):
        """Error handler for setkd: replies when the 'admin' role check fails."""
        if isinstance(error, commands.CheckFailure):
            await ctx.send('You do not have the necessary permissions for this command.')
'''
Piggyback on munkbot's !setprov to store discord ID in dragonScript.json
along with the respective province name
'''
@commands.command(aliases=['provset'])
async def setprov(self, ctx, *, prov_name):
with open(shameless_json, 'r') as f:
data = json.load(f)
for p_name, p_info in data.items():
if p_name == prov_name:
p_info["discord.id"] = ctx.author.id
p_info["discord.name"] = ctx.author.name
# update KD json with data
with open(shameless_json, 'w') as f:
json.dump(data, f, indent=4, sort_keys=True)
return await ctx.send('Info updated to link your Discord user '
'info with Rockbot\'s KD info')
elif prov_name not in data.keys():
return await ctx.send('Invalid province name, jabroni.')
### Our own custom dice roller
@commands.command()
async def roll(self, ctx, *, dsides: int=None):
if dsides == None:
dsides = 20
await ctx.send('**{}** rolled {}.\n*(using a d{} :game_die:)*'.format(
ctx.message.author.name,random.randint(1,dsides),dsides))
    @commands.command()
    async def giphy(self, ctx, *, giphy_search: str):
        """Fetch a random Giphy GIF matching the search phrase and post its URL."""
        # Giphy tags use dashes instead of spaces.
        giphy_search_urlify = giphy_search.replace(" ","-")
        # NOTE(review): the API key is hard-coded in the URL; consider
        # moving it to configuration.
        search_url = "https://api.giphy.com/v1/gifs/random?api_key=6w7qM8L6T0Z3OkwtUX1ahBhWLBK3A2Zs&tag="+giphy_search_urlify+"&rating=G"
        print(search_url)  # debug trace of the outgoing request
        async with self.bot.session.get(search_url) as r:
            data = await r.json()
            return await ctx.send(data["data"]["url"])
    @commands.command()
    async def me(self, ctx):
        """Show the caller's linked province info (name, honor, bounces) as an embed.

        NOTE(review): sends nothing when the caller has no linked province —
        confirm whether a "not linked" reply is wanted.
        """
        with open(shameless_json, 'r') as f:
            data = json.load(f)
        for p_name, p_info in data.items():
            if p_name != 'misc' and p_info["discord.id"] == ctx.author.id:
                embed = discord.Embed(color=0x4169e1)
                embed.set_author(name='Your Shameless User Info:')
                embed.set_thumbnail(url=
                    "https://cdn.discordapp.com/avatars/{0.id}/{0.avatar}.png?size=1024"
                    .format(ctx.author))
                embed.add_field(name= "Province Name: ", value = p_name, inline=True)
                embed.add_field(name='Ruler: ', value=ctx.author.name, inline=True)
                embed.add_field(name='Nobility: ', value=p_info["honor"], inline=True)
                embed.add_field(name='Bounces: ', value=p_info['bounces'])
                return await ctx.send(embed=embed)
    #Embed that lists the primary bot commands useful for @leaders
    @commands.command(aliases=['lh','helpleader','leaderhelp','leaders_help'])
    async def leader_help(self, ctx):
        """Post an embed summarising the bot commands leaders use most often."""
        embed = discord.Embed(color=0x3498db)
        embed.title = "Common Leader Commands"
        embed.add_field(name='__Using this guide__', value='With any of these, '
                    'you can use either . or ! to call the command. Whenever '
                    'there is [some text] listed in the syntax, you don\'t need '
                    'those square brackets.')
        embed.add_field(name='__The Basics__', value='**Orders**\n'
                    'Adding an order: ```.addorder [order text]\ne.g. .addorder Raze Calamity```\n'
                    'Deleting an order: ```.delorder [order ID number]\ne.g. .delorder 22```\n'
                    'Deleting all orders: *careful this can\'t be undone* ```.delallorders```\n'
                    '**Events**\n'
                    'Adding an event: ```.addevent [mmmDDyrX] [event text]\ne.g. .addorder apr9yr4 wave date```\n'
                    'Deleting an event: ```.delevent [event ID number]```\n'
                    'Display events: ```.events```\n'
                    '**Ops Plan**\n'
                    'When a war is over, or if a wave doesn\'t pan out but ops are already loaded, use this to clear them:'
                    '```.resetopplan```')
        embed.add_field(name='__Dragon Cost Calculator__', value='To determine the cost of a dragon against a '
                    'target kingdom, use: ```!fluffy cost [target KD] [emerald|ruby|amethyst|sapphire|topaz]\n'
                    'e.g. !fluffy cost 1:1 ruby```Rockbot will post the cost, and if you want to set this for '
                    'province funding amounts, the leader who called the command must type and enter "Yes" within '
                    '20 seconds or it will timeout.')
        embed.add_field(name='__Rockbot Points System__', value='To give or remove points, anyone with a leader '
                    'role may do so. Use the command below, and note that "!points" will always be a valid '
                    'command tag, but we often use a theme-specific command as well such as "!hugs" or "!takeout".'
                    '```!points [@user] [number of points, lead with - to remove] [reason for points{optional}]\n'
                    'e.g. !points @Dev 5 intel\n'
                    'e.g. !points @Calamity -5 being a douche```')
        return await ctx.send(embed=embed)
@commands.command()
async def support(self, ctx):
embed = discord.Embed(color=0x1abc9c)
embed.title = "Support Spells Cheatsheet"
embed.add_field(name='__Provs to check for Greater Protection__', value='\n'.join(gp_needers), inline=False)
embed.add_field(name='\n__REMINDER__', value='The best way to see if someone needs GP when there aren\'t spells'
'requested in the !request function is to check https://utopia-game.com/wol/game/kingdom_intel/8/3'
', and check to see if the provs listed above show GP in effect.')
await ctx.send(embed=embed)
    @commands.command()
    async def war_report(self, ctx):
        """Read war_report.txt and post it as a multi-field embed.

        The body is sliced into fixed chunks, presumably to stay under
        Discord's per-field length limit — TODO confirm the chunk sizes
        against the report generator.
        """
        # NOTE(review): the file handle is never closed; a 'with' block
        # would be safer.
        report_contents = open('/home/pi/cretobot/war_report.txt').readlines()
        chunk1 = report_contents[4:35]
        chunk2 = report_contents[35:65]
        chunk3 = report_contents[65:95]
        chunk4 = report_contents[95:125]
        chunk5 = report_contents[125:]
        print(len(report_contents))  # debug: total line count of the report
        embed = discord.Embed(color=0xffff00)
        embed.title = "Current War Report"
        embed.description = report_contents[1]
        embed.add_field(name='\u200b', value=''.join(chunk1), inline=False)
        embed.add_field(name='\u200b', value=''.join(chunk2), inline=False)
        embed.add_field(name='\u200b', value=''.join(chunk3), inline=False)
        embed.add_field(name='\u200b', value=''.join(chunk4), inline=False)
        embed.add_field(name='\u200b', value=''.join(chunk5), inline=False)
        await ctx.send(embed=embed)
    ### Custom help command
    @commands.command()
    async def help(self, ctx, cmd):
        """Custom replacement for discord.py's default help.

        Detailed pages exist for 'fluffy' (dragon funding/slaying) and
        'points' (team points system); any other argument gets a usage hint.
        """
        if cmd.lower() == 'fluffy':
            embed = discord.Embed(color=0xffff00)
            embed.set_author(name='Help for your candy-ass')
            embed.add_field(name='**`!fluffy`**', value='Use along with sub-commands to'
                            ' calculate and set dragon fund and slay amounts.', inline=False)
            embed.add_field(name='`!fluffy cost [x:y] [color]`', value='Takes given target KD '
                            'and dragon type to calculate dragon cost. To use this '
                            'command you must include a KD location, i.e. 4:13, and '
                            'dragon color (sapphire, gold, ruby, emerald). With '
                            'proper parameters used the dragon cost is shown, and '
                            'if you are in @leaders you are given the option to set '
                            'the dragon, which will populate the fund cost for each '
                            'province.')
            embed.add_field(name='`!fluffy fund`', value='Lists the per-province'
                            ' funding amounts.')
            embed.add_field(name='`!fluffy slay`', value='Lists the per-province'
                            ' slay points.')
        elif cmd == 'points':
            embed = discord.Embed(color=0xffff00)
            embed.set_author(name='Help on the team points system for your candy-ass')
            embed.add_field(name='`!teams | !teamlist | !listteams`', value=
                            'Shows the current makeup of the three teams.')
            embed.add_field(name='`!mypoints`', value='Lists your team\'s current'
                            ' number of points, as well as your individual points'
                            ' for the age.')
            embed.add_field(name='`!scoreboard`', value='Shows current points '
                            'for all teams, along with the team leaders.')
            embed.add_field(name='`!points` (Leaders Only)', value='If you have '
                            'the @leaders> role, you can assign or '
                            'take away points using the following syntax:\n'
                            '`!points [@user] [number of points] [reason(optional)]'
                            '\n *Note: to remove points, just use "-".*')
            embed.add_field(name='\u200b', value='A log of points given/taken '
                            'can be found in the <#499226632704229376> channel.')
        else:
            # Unknown topic: explain the expected syntax.
            embed = discord.Embed(title="Oops", color=0xffff00)
            embed.add_field(name='Need more info...', value=
                            'You need to include an existing command'
                            ' after !help, i.e. `!help [command]`')
        await ctx.send(embed=embed)
# Retrieves our KD location from json
@commands.command()
async def kd(self, ctx):
with open ("/home/pi/cretobot/shameless77.json", "r+") as f:
data = json.load(f)
our_kd = data["misc"]["our_KD"]
await ctx.send('Our KD is ({}).'.format(our_kd))
    # Sets value in Shameless JSON file for the .whois trigger
    @commands.command()
    @commands.has_any_role('leaders', 'admin')
    async def setwhois(self, ctx, member_to_change, *, new_whois: str=None):
        """Leaders-only: store a custom .whois blurb for a mentioned user.

        `member_to_change` must be a raw Discord mention (<@id> or <@!id>);
        `new_whois` is the URL/phrase to store (omit it to clear the entry).
        """
        print(member_to_change[0])
        # A mention always starts with '<'; reject anything else early.
        if member_to_change[0] != "<":
            return await ctx.send("Please be sure you are using the correct format"
                                  " ```!setwhois @user [URL or desired phrase]```")
        if new_whois == None:
            new_whois = ""
        # Nickname mentions look like <@!id>; drop the '!' before parsing the id.
        member_to_change = member_to_change.replace('!','')
        member_stripped = ctx.guild.get_member(int(member_to_change.strip('<@>')))
        print(member_stripped.id)
        print(new_whois)
        with open(shameless_json, 'r') as f:
            data = json.load(f)
        # Find the province linked to this discord id and rewrite the file.
        for p_name, p_info in data.items():
            print(p_name)
            if p_name != "misc" and p_info["discord.id"] == member_stripped.id:
                p_info["whois"] = new_whois
                with open(shameless_json, 'w') as f:
                    json.dump(data, f, indent=4, sort_keys=True)
                return await ctx.send("Whois trigger successfully updated.")
        else:
            # for/else: reached when no linked province returned above.
            return await ctx.send("Having trouble finding that user\'s info. Please check"
                                  " and try again.")
# Notify if person doens't have necessary role
@setwhois.error
async def setwhois_error(self, ctx, error):
if isinstance(error, commands.CheckFailure):
await ctx.send('Quit posing, you candy-ass! You don\'t tell me what to do!')
@commands.command()
@commands.has_any_role('leaders', 'admin')
async def rocksays(self, ctx, destination: discord.TextChannel=None, *, msg: str):
#Makes the bot say something in the specified channel
if not destination.permissions_for(ctx.author).send_messages:
return await ctx.message.add_reaction("\N{WARNING SIGN}")
destination = ctx.message.channel if destination is None else destination
emoji = self.bot.get_emoji(466428026880786452)
await destination.send(msg)
return await ctx.message.add_reaction(emoji)
    # Taunt users who lack the required role.
    @rocksays.error
    async def rocksays_error(self, ctx, error):
        # Only the failed role check gets a reply; other errors pass silently.
        if isinstance(error, commands.CheckFailure):
            await ctx.send('Quit posing, you candy-ass! You don\'t tell me what to do!')
    ### Let me send things to command line
    @commands.command()
    @commands.is_owner()
    async def sendpi(self, ctx, *, print_terminal: str):
        """Owner-only: echo a message (with a timestamp) to the bot host's
        stdout and confirm with a checkmark reaction."""
        time_sent = dt.now()
        print('{} \n Time sent: {}'.format(print_terminal,time_sent))
        confirm_emoji = '✔️'
        return await ctx.message.add_reaction(confirm_emoji)
    ### listen in #attacks_log for bounce message, and update json with info
    @commands.Cog.listener()
    async def on_message(self, message):
        """Global message hook.

        Handles, in order: bounce bookkeeping from #attacks_log, automatic
        GP re-requests when utopiabot announces an expiry, a running gag,
        cleanup of utopiabot spam, and the .whois vanity trigger.
        """
        bouncewords = ["bounce:",")."]
        cutwords = ['cut a bitch','cut you','cut someone']
        # captures bounce info
        if (message.channel.name == 'attacks_log' and any(s in message.content
                                                          for s in bouncewords)):
            bounce_msg = message.content
            print(bounce_msg)
            # Two candidate name slices -- presumably the two bounce-message
            # variants put the province name at different offsets; confirm
            # against the actual log format.
            bouncer = [bounce_msg[17:bounce_msg.find(' [')].strip(),
                       bounce_msg[2:bounce_msg.find(' [')].strip()]
            bouncetime = dt.utcnow().strftime("%c")
            print(bouncetime)
            print(bouncer)
            with open(shameless_json, 'r') as f:
                data = json.load(f)
            # Credit the bounce to whichever province name matched.
            for p_name, p_info in data.items():
                if p_name in bouncer:
                    p_info['bounces'] += 1
                    p_info['last_bounce'] = bouncetime
            with open(shameless_json, 'w') as f:
                json.dump(data, f, indent=4, sort_keys=True)
        # Auto-request GP for provinces whose greater protection expired.
        elif (message.channel.id == 435902101810577458 and '__greater protection__ expired' in message.content):
            list_of_stuff = []
            list_of_stuff = message.content.split('\n')
            for x in list_of_stuff:
                if '__greater protection__ expired' in x:
                    # The province name is the text inside [brackets].
                    gp_needer = re.search('\[(.+?)\]', x).group(1)
                    if gp_needer not in gp_supporters:
                        await message.channel.send('.requestfor {} gp note: added by Rockbot'.format(gp_needer))
                        print(gp_needer)
            print(list_of_stuff)
        #troll Kalrenz and his fat self
        elif message.author.id == 88125388051447808 and 'ranch' in message.content.lower():
            ranch_gifs = ['https://media.giphy.com/media/3oGRFnbLY1U9S5ZITm/giphy.gif',
                          'https://media.giphy.com/media/l3c5Kcw7NypsdWTPa/giphy.gif',
                          'https://media.giphy.com/media/lU5FniZJ03PMc/giphy.gif',
                          'https://media.giphy.com/media/26uf4ztgExZ3VEzUQ/giphy.gif',
                          'https://media.giphy.com/media/xT5LMq7AadOYYpEYdq/giphy.gif']
            ranchitem = random.choice(ranch_gifs)
            await message.channel.send('Here you go, fat-ass: {}'.format(ranchitem))
        # Cleans up utopiabot spam from bad .list messages
        elif 'not an option, try one of these' in message.content:
            await message.delete(delay=3)
        elif 'Sent PM to you, <@!460400451645472782>' in message.content:
            await message.delete(delay=0)
        # Cleans up stupid utopiabot's worthless dice rolls
        elif 'You rolled the dice and you got' in message.content:
            await message.delete(delay=0)
        #Uses the .whois function in utopiabot to load a fun trigger for searched user
        elif 'phone:' in message.content:
            start = 'prov: '
            end = ' | links:'
            # Province name sits between 'prov: ' and ' | links:' in the
            # utopiabot .whois output.
            prov_to_search = (message.content.split(start))[1].split(end)[0]
            with open(shameless_json, 'r') as f:
                data = json.load(f)
            for p_name, p_info in data.items():
                if p_name != "misc" and p_name == prov_to_search:
                    await message.channel.send('{}'.format(p_info["whois"]))
        elif any(s in message.content.lower() for s in cutwords):
            await message.channel.send('https://giphy.com/gifs/imRiPoKJB9R9m')
    @commands.command(pass_context=True)
    async def bounces(self, ctx):
        """Tell the caller how many times they've bounced this age.

        Looks the caller up by discord.id in the shameless JSON; if no
        province is linked, prompts them to run !provset first.
        """
        with open("/home/pi/cretobot/shameless77.json", "r") as f:
            data = json.load(f)
        not_here = 1
        for p_name, p_info in data.items():
            if p_name != "misc" and p_info["discord.id"] == ctx.author.id:
                not_here = 0
                return await ctx.send('You have bounced {} times this age, and your last '
                                      'bounce was {}.\n'
                                      'Way to go, dumbass!'.format(p_info["bounces"],
                                                                   p_info["last_bounce"]))
        if not_here == 1:
            await ctx.send('Your Discord user is not currently linked to any '
                           'province for Rockbot commands. Please do `!provset '
                           '[Province Name]` to link, then try this command again.')
    @commands.command(pass_context=True)
    async def shamelist(self, ctx):
        """Post the bounce leaderboard: everyone with at least one bounce,
        sorted worst-first."""
        with open(shameless_json, 'r') as f:
            data = json.load(f)
        # discord.id -> bounce count, skipping the 'misc' bookkeeping entry.
        losers = {p_info['discord.id']: p_info['bounces'] for p_name, p_info in data.items()
                  if p_name != 'misc' and p_info['bounces'] > 0}
        sortedlosers = sorted(losers.items(), key=lambda x: x[1], reverse=True)
        embedlosers = '\n'.join('<@{}> | Bounces: {}'.format(loser_id, loser_bounces)
                                for loser_id, loser_bounces in sortedlosers)
        embed = discord.Embed(title='The Shameless Shame-List', color=0xb20000)
        embed.set_author(name='Age 84')
        embed.add_field(name='Best of the worst:', value=embedlosers)
        await ctx.send(embed=embed)
    @commands.command(name='react')
    async def react_log(self, ctx, msg_id: int):
        """Tally reactions on a message by ID and post 'emoji | count' lines.

        NOTE(review): `ctx.get_message` only exists on older discord.py
        releases; newer versions use `ctx.channel.fetch_message` -- confirm
        the installed library version.
        """
        _message = await ctx.get_message(msg_id)
        results = {react.emoji: react.count for react in _message.reactions}
        print('\n'.join('{}|{}'.format(x,y) for x,y in results.items()))
        # await ctx.send(results)
        await ctx.send('\n'.join('{} | {}'.format(candidate, count) for candidate,
                                 count in results.items()))
@commands.command()
async def parrot(self, ctx, *, memeify:str=None):
img = Image.open('images/parrot.jpg')
fnt = ImageFont.truetype('Impact.ttf', 50)
d = ImageDraw.Draw(img)
d.text((50,50), 'SQUAWK!!! ' + memeify.upper(), font=fnt, fill=(0, 191, 255))
img.save('images/parrot_text.jpg')
await ctx.send(file=discord.File('images/parrot_text.jpg'))
    @commands.command()
    @commands.is_owner()
    async def pass_file(self, ctx, filepath):
        """Owner-only: upload a file from the bot host; `filepath` is
        relative to the cretobot directory."""
        print('/home/pi/cretobot/'+filepath)
        await ctx.send(file=discord.File('/home/pi/cretobot/'+filepath))
@commands.command()
async def jack(self, ctx, *, memeify:str=None):
img = Image.open('images/jack.gif')
fnt = ImageFont.truetype('Arial_Bold_Italic.ttf', 20)
d = ImageDraw.Draw(img)
d.text((50,50), memeify, font=fnt, fill=(255, 255, 0))
img.save('images/jack_text.gif')
await ctx.send(file=discord.File('images/jack_text.gif'))
    @commands.command(aliases=["bk"], pass_context=True)
    async def bounceking(self, ctx):
        """Crown the user with the most bounces this age."""
        with open(shameless_json, 'r') as f:
            data = json.load(f)
        bounce_list = {p_info['discord.id']: p_info['bounces'] for p_name, p_info in data.items()
                       if p_name != 'misc'}
        # Sort worst-first; index 0 is the reigning Bounce King.
        sorted_bouncers = sorted(bounce_list.items(), key=lambda x: x[1], reverse=True)
        bounceking = self.bot.get_user(sorted_bouncers[0][0])
        print(bounceking)
        embed = discord.Embed(title='The Shameless Bounce King', color=0xffd700)
        embed.set_thumbnail(url=
                            'https://cdn.discordapp.com/attachments/404868940683149332/497485025743470602/BOUNCE-KING.png')
        embed.add_field(name='\u200b', value='<@{}> | Bounces: {}'.format(
            sorted_bouncers[0][0], sorted_bouncers[0][1]))
        await ctx.send(content='All hail the Bounce King!', embed=embed)
    @commands.command(aliases=["converttemp", "temp", "convert"])
    async def tempconvert(self, ctx, temp:int, system):
        """Convert a whole-number temperature between Fahrenheit and Celsius.

        `system` names the unit of `temp` ("f" or "c"); the result is
        truncated toward zero by int().  NOTE(review): the ':flag_us:'
        emoji in the C->F reply looks like an intentional gag -- confirm.
        """
        if system.lower() == "f":
            converted_temp = int((temp - 32) * 5 / 9)
            return await ctx.send("{}\u00b0F is equal to {}\u00b0C".format(temp,converted_temp))
        elif system.lower() == "c":
            converted_temp = int((temp * 9 / 5) + 32)
            return await ctx.send("{}\u00b0C is equal to {}\u00b0:flag_us:".format(temp,converted_temp))
        else:
            return await ctx.send("Please use the format: ```!tempconvert [temperature] [C/F]```")
@commands.command()
async def gold(self, ctx):
role_info = discord.utils.get(ctx.guild.roles, id=715567892644626483)
embed1 = '\n'.join(('{}'.format(x.display_name)) for x in role_info.members)
embed = discord.Embed(title='Members With Gold Status', color=0xd4af37)
embed.add_field(name='\u200b', value = embed1)
await ctx.send(content='Here you go jabroni:', embed=embed)
@commands.command()
async def hasrole(self, ctx, passed_role):
passed_role_id = int(passed_role.strip('<@&>'))
print(passed_role_id)
role_info = discord.utils.get(ctx.guild.roles, id=passed_role_id)
role_color_stripped = role_info.color
role_color_stripped = role_color_stripped.strip('#')
role_color = '0x'.join(role_color_stripped)
print(role_color)
embed1 = '\n'.join(('{}'.format(x.display_name)) for x in role_info.members)
embed = discord.Embed(title='Members with {} role:', color=role_color)
embed.add_field(name='\u200b', value=embed1)
await ctx.send(content='Here you go jabroni:', embed=embed)
print(role_info.name)
print(role_color)
def setup(bot):
    """discord.py extension entry point: attach the Meta cog to the bot."""
    bot.add_cog(Meta(bot))
| [
"cretoria@cretoria.rocks"
] | cretoria@cretoria.rocks |
13ab29e5ceffbff9de60963df7ec385ba55cad77 | ee6fc02e8392ff780a4f0d1a5789776e4d0b6a29 | /code/abc/150/abc150_b.py | c748c5aa7445dd342d4b813baf31ee0e24d77bef | [] | no_license | mollinaca/ac | e99bb5d5c07159b3ef98cd7067424fa2751c0256 | 2f40dd4333c2b39573b75b45b06ad52cf36d75c3 | refs/heads/master | 2020-12-22T11:02:13.269855 | 2020-09-18T01:02:29 | 2020-09-18T01:02:29 | 236,757,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
n = int(input())
s = str(input())
# Count how many positions i (with room for three characters before
# index n) start the substring "ABC".
count = sum(1 for i in range(n - 2)
            if s[i] == "A" and s[i + 1] == "B" and s[i + 2] == "C")
print(count)
"github@mail.watarinohibi.tokyo"
] | github@mail.watarinohibi.tokyo |
40b38f3402e6f240a77ba29372569fef8f2c69f6 | f8acaa3210a5eb392d5cd2567a000f26e812cfb9 | /2015/python/day12/main.py | fe6f3c8c3e41333b75fef811e0c1e9525afbb072 | [] | no_license | andrewpickett/advent-of-code | aeb105f7e50cfe5b7ebc80e10bc58451b5a00ac6 | ab3fa828257fa0e9f64f07397d3012274b8bebaa | refs/heads/master | 2023-01-22T23:28:46.989329 | 2022-12-28T18:06:50 | 2022-12-28T18:06:50 | 224,327,063 | 2 | 0 | null | 2023-01-08T11:09:58 | 2019-11-27T02:27:23 | Python | UTF-8 | Python | false | false | 918 | py | import json
import re
from aoc_utils import run_with_timer
data = open('input.txt').readline().strip()
matcher = re.compile('[ a-zA-Z:,}{"\]\[]')
def part_one():
    """Sum every signed integer embedded in the raw JSON text."""
    return sum(int(tok) for tok in matcher.split(data) if tok != '')
def contains_red(root_obj):
    """Recursively prune JSON-ish data of anything 'red'-tainted.

    Returns True when *root_obj* itself must be discarded (a dict with a
    'red' key or value); otherwise mutates *root_obj* in place -- blanking
    dict values / removing list elements whose subtree contains red -- and
    returns a falsy value (None for containers, False for scalars).

    NOTE(review): the `k == 'red'` test also rejects dicts whose *key* is
    'red'; the AoC puzzle only excludes 'red' *values* -- confirm intended.
    """
    if type(root_obj) is dict:
        for k, v in root_obj.items():
            if k == 'red':
                return True
            if type(v) in (list, dict) and contains_red(v):
                root_obj[k] = ''  # tainted child: blank it out
            elif v == 'red':
                return True
    elif type(root_obj) is list:
        # Iterate over a copy so removal during the loop is safe.
        for y in [x for x in root_obj if type(x) in (list, dict) and contains_red(x)]:
            root_obj.remove(y)
    else:
        return False
def part_two():
    """Sum the numbers remaining after discarding every red-tainted object."""
    json_data = json.loads(data)
    # contains_red prunes in place; a truthy return means the whole
    # top-level container is tainted.
    if contains_red(json_data):
        json_data.clear()
    return sum([int(x) for x in matcher.split(json.dumps(json_data)) if x != ''])
if __name__ == '__main__':
    # Trailing comments record the expected answers/timings of a prior run.
    run_with_timer(part_one)  # 119433 -- took 6 ms
    run_with_timer(part_two)  # 68466 -- took 6 ms
| [
"picketta@gmail.com"
] | picketta@gmail.com |
078218e2b5abb01b946103d17fbedfcc0ad5049c | fa563a00bd22331e8e6050824247414d18795c50 | /fit_scripts.py | cfe283a62151e46b1ffc96ea73dda486d79d5ddb | [] | no_license | firest11/thesis-analysis-modules | b4dd46dcdf6e45f643089ebc011c0d3e2f80acb1 | 4b7f712be1c97abeee3cd8afd6bf23b24bafa000 | refs/heads/main | 2023-03-24T23:13:03.621331 | 2021-03-25T18:46:49 | 2021-03-25T18:46:49 | 351,541,338 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,947 | py | import sys
import functools as ft
import numpy as np
import scipy.linalg as scilin
import scipy.optimize as opt
import scipy.sparse as sps
from scipy.sparse import linalg as splinalg
from . import fitfuncs as ffunc
def sparse_cholesky(A):
    """Sparse Cholesky factor L of a symmetric positive-definite matrix A.

    Returns a sparse L with A == L @ L.T, built from the sparse LU
    factorization: with pivoting disabled and the natural ordering, an SPD
    matrix factors as A = L_u U with U = D L_u.T, so L = L_u sqrt(D).

    Adapted from:
    https://gist.github.com/omitakahiro/c49e5168d04438c5b20c921b928f1f5d

    Exits the interpreter (sys.exit) when A is not positive definite,
    matching the original behavior.
    """
    n = A.shape[0]
    # permc_spec='NATURAL' disables the fill-reducing COLAMD column
    # permutation; without it SuperLU may reorder columns, perm_r stops
    # being the identity and the positive-definiteness check below fails
    # spuriously for perfectly good SPD matrices.
    LU = splinalg.splu(A, diag_pivot_thresh=0, permc_spec='NATURAL')  # sparse LU decomposition
    # A is positive definite iff no row pivoting occurred and every pivot
    # (the diagonal of U) is strictly positive.
    if (LU.perm_r == np.arange(n)).all() and (LU.U.diagonal() > 0).all():
        return LU.L.dot(sps.diags(LU.U.diagonal()**0.5))
    else:
        sys.exit('The matrix is not positive definite')
# ----------------------- For Scipy.Optimize.Minimize -----------------------#
def chisquare(xo, xdata, ydata, covar, func, priors=None):
    """Reduced chi-square of model `func` at parameters `xo`.

    The covariance is handled through its lower Cholesky factor (whitened
    residuals).  When `priors = (popt, perr)` is given, a Gaussian prior
    penalty on parameters xo[1:] is added (xo[0] stays unconstrained).
    """
    dof = len(xdata) - len(xo)
    residual = func(xdata, *xo) - ydata
    chol = scilin.cholesky(covar, lower=True)
    white = scilin.solve_triangular(chol, residual, lower=True)
    chisq = white.dot(white.conj()) / dof
    if priors is None:
        return chisq
    popt, perr = priors
    penalty = sum(((xo[j + 1] - popt[j]) ** 2) / (perr[j] ** 2)
                  for j in range(len(popt)))
    return chisq + 0.5 * penalty
def chiJac(jac, xo, xdata, ydata, covar, func, priors=None):
    """Gradient of `chisquare` with respect to the fit parameters.

    `jac(xdata, *xo)` must return the (len(xdata), len(xo)) Jacobian of
    `func` in its parameters.  Returns 2*Re(J^H C^{-1} r)/dof, plus the
    prior gradient when `priors = (popt, perr)` is supplied.
    """
    dof = len(xdata) - len(xo)
    res = func(xdata, *xo) - ydata
    L = scilin.cholesky(covar, lower=True)
    # Whiten both factors with the *lower* Cholesky factor:
    # (L^{-1} J)^H (L^{-1} r) = J^H C^{-1} r.
    lhs = scilin.solve_triangular(L, jac(xdata, *xo), lower=True)
    # BUG FIX: the original omitted lower=True here, so scipy treated L as
    # upper triangular and solved against the wrong matrix.
    rhs = scilin.solve_triangular(L, res, lower=True)
    chiJ = lhs.conj().T.dot(rhs)/dof
    if priors is None:
        return 2*chiJ.real
    else:
        popt, perr = priors
        # BUG FIX: the prior vector must have len(xo) entries to add to the
        # gradient (the original made it len(popt) == len(xo)-1, which
        # raised a broadcasting error).  The prior penalizes xo[1:], cf.
        # chisquare, so xo[0] gets zero prior gradient.
        prior_jac = np.zeros(len(xo))
        for j in range(len(popt)):
            prior_jac[j + 1] = (xo[j + 1] - popt[j]) / (perr[j] ** 2)
        return 2*chiJ.real + prior_jac
def chiHess(jac, hess, xo, xdata, ydata, covar, func, priors=None):
    """Hessian of `chisquare` w.r.t. the fit parameters (Gauss-Newton-like).

    NOTE(review): suspected defects, deliberately left untouched here:
      * solve_triangular needs a 2-D right-hand side, so `hess(xdata, *xo)`
        would have to return a 2-D array -- confirm its shape;
      * `b_term` looks like it wants J^H C^{-1} J but computes
        xJac.conj().dot(xJac) with no transpose, and unlike `a_term` it is
        not divided by dof;
      * `priors` is accepted but never used (chisquare/chiJac do use it).
    """
    dof = len(xdata) - len(xo)
    res = func(xdata, *xo) - ydata
    L = scilin.cholesky(covar, lower=True)
    # Part (a) with Hessian: (Hess * Linv).H * (Linv * res)
    lhs_a = scilin.solve_triangular(L, hess(xdata, *xo), lower=True)
    rhs_a = scilin.solve_triangular(L, res, lower=True)
    a_term = lhs_a.conj().dot(rhs_a)/dof
    # Part (b) Jacobians: (Jac * Linv).H * (Linv * Jac)
    xJac = scilin.solve_triangular(L, jac(xdata, *xo), lower=True)
    b_term = xJac.conj().dot(xJac)
    return 2*(a_term + b_term)
def minsolve(xdata, ydata, covar, func, xo,
             cJac=None, cHess=None, *args, **kwargs):
    """Fit `func` to data by minimizing `chisquare` via scipy.optimize.

    cJac / cHess are the parameter Jacobian / Hessian of `func`; when
    given, analytic gradient (and Hessian) callbacks are wired into
    scipy.optimize.minimize.  Extra *args/**kwargs are forwarded to
    `minimize`.  Raises on a failed optimization; otherwise returns the
    solution vector.
    """
    fit_args = (xdata, ydata, covar, func)
    derivs = {}
    if cJac is not None:
        derivs['jac'] = ft.partial(chiJac, cJac)
        # The Hessian callback is only meaningful alongside the Jacobian.
        if cHess is not None:
            derivs['hess'] = ft.partial(chiHess, cJac, cHess)
    res = opt.minimize(chisquare, xo, args=fit_args, *args, **derivs, **kwargs)
    if not res.success:
        raise Exception("Status: {0};\nMessage: {1}".format(res.status, res.message))
    return res.x
# ------------------------- Effective Mass Plot -------------------------#
def eff_mass(xdata, ydata, miter=1000, verbose=False):
    """Effective-mass curve from a correlator via ratio root-finding.

    For each time slice t, solves ffunc.c2pt_smeson_ratio(E, t) equal to
    the data ratio y(t+1)/y(t) for E with Brent's method on [0, 1].
    Slices where the solver fails are skipped (reported if verbose=True).

    Returns (xvals, meff) as numpy arrays; raises when no point converges.
    """
    ratios = ydata[1:] / ydata[:-1]

    def residual(E, t, target):
        return ffunc.c2pt_smeson_ratio(E, t) - target

    xvals, meff = [], []
    for idx, t in enumerate(xdata[1:]):
        try:
            root = opt.brentq(
                residual, 0, 1, args=(t, ratios[idx]), maxiter=miter
            )
        except Exception as err:  # best effort: skip non-bracketing slices
            if verbose:
                print("Error: {0}".format(err))
            continue
        meff.append(root)
        xvals.append(t)
    if not meff:
        raise Exception("Empty Effective Mass Plot")
    return np.asarray(xvals), np.asarray(meff)
# ------------------------- C3pt Fits ------------------------- #
def gen_lstsq_FF_sumFit(dts, dat, cov, chisquare=True):
    """Generalized least squares for the summation-method linear fit.

    Solves dat ~= X @ params with X = ffunc.sumFit_X(dts), whitening both
    sides with the lower Cholesky factor of `cov`.

    Returns the parameter vector, plus the reduced chi-square when
    `chisquare` is True.
    """
    X = ffunc.sumFit_X(dts)
    L = scilin.cholesky(cov, lower=True)
    Xl = scilin.solve_triangular(L, X, lower=True)
    yl = scilin.solve_triangular(L, dat, lower=True)
    val, res, rnk, d = scilin.lstsq(Xl, yl)
    if not chisquare:
        return val
    # Consistent with the other gen_lstsq_* fits: compute chi^2 through the
    # triangular factor instead of forming cov^{-1} explicitly, and build
    # the residual from X itself (the old inline a*x+b assumed a particular
    # column order in the design matrix).
    residual = dat - X.dot(val)
    xres = scilin.solve_triangular(L, residual, lower=True)
    dof = 2 * (len(dts) - 2)
    chisq = xres.conj().dot(xres) / dof
    return val, chisq.real
def gen_lstsq_FF_nstateFit(Esnk, Esrc, dtaus, dat, cov, chisquare=True):
    """Generalized least squares for the n-state form-factor fit.

    The design matrix comes from ffunc.FF_nstateFit_X; data and model are
    whitened with the lower Cholesky factor of `cov`.

    Returns the parameter vector, plus the reduced chi-square when
    `chisquare` is True.
    """
    X = ffunc.FF_nstateFit_X(Esnk, Esrc, dtaus)
    L = scilin.cholesky(cov, lower=True)
    Xl = scilin.solve_triangular(L, X, lower=True)
    yl = scilin.solve_triangular(L, dat, lower=True)
    val, res, rnk, d = scilin.lstsq(Xl, yl)
    if not chisquare:
        return val
    residual = dat - X.dot(val)
    xres = scilin.solve_triangular(L, residual, lower=True)
    # Factor of 2: real and imaginary parts each count as a data point.
    dof = 2 * (len(dtaus) - len(val))
    chisq = xres.conj().dot(xres) / dof
    # (Removed the original's leftover debug block that printed the
    # residual and 'what?' when dof went negative.)
    return val, chisq.real
def gen_lstsq_c3pt_nstateFit(popt_snk, popt_src, dtaus,
                             dat, cov, chisquare=True):
    """Generalized least squares for the n-state c3pt fit.

    Whitens the ffunc.c3pt_nstateFit_X design matrix and the data with the
    lower Cholesky factor of `cov`, then solves the ordinary least-squares
    problem.  Returns the parameters, plus the reduced chi-square when
    `chisquare` is True.
    """
    design = ffunc.c3pt_nstateFit_X(popt_snk, popt_src, dtaus)
    chol = scilin.cholesky(cov, lower=True)
    white_X = scilin.solve_triangular(chol, design, lower=True)
    white_y = scilin.solve_triangular(chol, dat, lower=True)
    params = scilin.lstsq(white_X, white_y)[0]
    if not chisquare:
        return params
    residual = dat - design.dot(params)
    white_res = scilin.solve_triangular(chol, residual, lower=True)
    dof = 2 * (len(dtaus) - len(params))
    red_chisq = white_res.conj().dot(white_res) / dof
    return params, red_chisq
def FF_sumFit(dts, tau_o, data, dblocks, chisquare=False):
    """Summation-method form-factor fit on jackknife-blocked data.

    For each "wline" of data (shape (wlines, dt, tau)) the insertion times
    tau_o .. dt-tau_o-1 are summed per source-sink separation, the
    jackknife covariance of those sums is built from `dblocks`, and a
    generalized linear fit (gen_lstsq_FF_sumFit) extracts the parameters.

    Returns a (wlines, 2) parameter array, plus per-wline reduced
    chi-squares when `chisquare` is True.
    """
    # data[wlines, dt, tau]
    wshape, dt_shape, tau_shape = data.shape
    nbins = dblocks.shape[0]
    coeff = (nbins-1.0)/nbins  # jackknife covariance prefactor (N-1)/N
    dsums = np.array([
        [data[k, j, tau_o:dt-(tau_o+1)].sum() for j, dt in enumerate(dts)]
        for k in range(wshape)
    ])
    # Generators keep the per-wline block sums / covariances lazy.
    dsums_blocks = (
        np.array([[dblk[k, j, tau_o:dt-(tau_o+1)].sum()
                   for j, dt in enumerate(dts)]
                  for dblk in dblocks]) for k in range(wshape)
    )  # shape (wlines, blocks, dts) # (blocks, wlines, dts)
    dsums_cov = (
        coeff*np.sum(np.array([
            np.outer((dsms - dsums[k]).conj(), dsms-dsums[k])
            for dsms in dsums_blks]), axis=0)
        for k, dsums_blks in enumerate(dsums_blocks)
    )
    if chisquare:
        chisqs = np.zeros(wshape)
    vals = np.zeros((wshape, 2), dtype=data.dtype)
    for k, dcov in enumerate(dsums_cov):
        if chisquare:
            val, chisq = gen_lstsq_FF_sumFit(
                dts, dsums[k], dcov, chisquare=chisquare
            )
            vals[k] = val
            chisqs[k] = chisq
        else:
            val = gen_lstsq_FF_sumFit(dts, dsums[k], dcov,
                                      chisquare=chisquare)
            vals[k] = val
    if chisquare:
        return vals, chisqs
    else:
        return vals
def FF_forward_nstateFit(dts, tau_o, E, data, dblocks, chisquare=False):
    """UNFINISHED: forward-kinematics n-state form-factor fit.

    Builds the flattened data vectors and jackknife covariances exactly as
    FF_nstateFit does, but the fitting loop was never written -- the body
    prints 'Incomplete' and the function always raises.
    """
    wshape, dt_shape, tau_shape = data.shape
    nbins = dblocks.shape[0]
    coeff = (nbins-1.0)/nbins  # jackknife covariance prefactor
    dtaus = ffunc.flatten_taus(dts, tau_o)
    dflats = np.array([ffunc.flatten_c3pt(data[k], dts, tau_o)
                       for k in range(wshape)])
    dflats_blocks = (
        [ffunc.flatten_c3pt(dblk[k], dts, tau_o) for dblk in dblocks]
        for k in range(wshape)
    )
    dflats_cov = (
        coeff*np.sum(np.array([
            np.outer((dflt - dflats[k]).conj(), dflt - dflats[k])
            for dflt in dflats_blks]), axis=0)
        for k, dflats_blks in enumerate(dflats_blocks)
    )
    if chisquare:
        chisqs = np.zeros(wshape)
    vals = None
    # Stub: the actual fit call was never implemented.
    for k, dcov in enumerate(dflats_cov):
        if chisquare:
            print('Incomplete')
            break
    raise Exception('Incomplete Function')
def FF_nstateFit(dts, tau_o, Esnk, Esrc, data, dblocks, chisquare=False):
    """N-state form-factor fit on jackknife-blocked three-point data.

    Flattens data of shape (wlines, dt, tau) over the (dt, tau) grid via
    ffunc.flatten_c3pt, builds the per-wline jackknife covariance from
    `dblocks`, and fits each wline with gen_lstsq_FF_nstateFit using the
    sink/source energies Esnk/Esrc.

    Returns a stacked parameter array, plus per-wline reduced chi-squares
    when `chisquare` is True.
    """
    wshape, dt_shape, tau_shape = data.shape
    nbins = dblocks.shape[0]
    coeff = (nbins-1.0)/nbins  # jackknife covariance prefactor
    dtaus = ffunc.flatten_taus(dts, tau_o)
    dflats = np.array([ffunc.flatten_c3pt(data[k], dts, tau_o)
                       for k in range(wshape)])
    dflats_blocks = (
        [ffunc.flatten_c3pt(dblk[k], dts, tau_o) for dblk in dblocks]
        for k in range(wshape)
    )
    dflats_cov = (
        coeff*np.sum(np.array([
            np.outer((dflt - dflats[k]).conj(), dflt - dflats[k])
            for dflt in dflats_blks]), axis=0)
        for k, dflats_blks in enumerate(dflats_blocks)
    )
    if chisquare:
        chisqs = np.zeros(wshape)
    vals = None
    for k, dcov in enumerate(dflats_cov):
        if chisquare:
            val, chisq = gen_lstsq_FF_nstateFit(
                Esnk, Esrc, dtaus, dflats[k], dcov, chisquare=chisquare
            )
            # Stack per-wline parameter vectors row by row.
            if vals is None:
                vals = np.array([val])
            else:
                vals = np.vstack((vals, np.array([val])))
            chisqs[k] = chisq
        else:
            val = gen_lstsq_FF_nstateFit(
                Esnk, Esrc, dtaus, dflats[k], dcov, chisquare=chisquare
            )
            if vals is None:
                vals = np.array([val])
            else:
                vals = np.vstack((vals, np.array([val])))
    if chisquare:
        return vals, chisqs
    else:
        return vals
def c3pt_nstateFit(dts, tau_o, popt_snk, popt_src,
                   data, dblocks, chisquare=False):
    """N-state fit of raw c3pt data on jackknife blocks.

    Same pipeline as FF_nstateFit, but the design matrix is driven by the
    two-point fit parameters popt_snk/popt_src via
    gen_lstsq_c3pt_nstateFit.

    Returns a stacked parameter array, plus per-wline reduced chi-squares
    when `chisquare` is True.
    """
    wshape, dt_shape, tau_shape = data.shape
    nbins = dblocks.shape[0]
    coeff = (nbins-1.0)/nbins  # jackknife covariance prefactor
    dtaus = ffunc.flatten_taus(dts, tau_o)
    dflats = np.array([ffunc.flatten_c3pt(data[k], dts, tau_o)
                       for k in range(wshape)])
    dflats_blocks = (
        [ffunc.flatten_c3pt(dblk[k], dts, tau_o) for dblk in dblocks]
        for k in range(wshape)
    )
    dflats_cov = (
        coeff*np.sum(np.array([
            np.outer((dflt - dflats[k]).conj(), dflt - dflats[k])
            for dflt in dflats_blks]), axis=0)
        for k, dflats_blks in enumerate(dflats_blocks)
    )
    if chisquare:
        chisqs = np.zeros(wshape)
    vals = None
    for k, dcov in enumerate(dflats_cov):
        if chisquare:
            val, chisq = gen_lstsq_c3pt_nstateFit(
                popt_snk, popt_src, dtaus, dflats[k], dcov, chisquare=chisquare
            )
            # Stack per-wline parameter vectors row by row.
            if vals is None:
                vals = np.array([val])
            else:
                vals = np.vstack((vals, np.array([val])))
            chisqs[k] = chisq
        else:
            val = gen_lstsq_c3pt_nstateFit(
                popt_snk, popt_src, dtaus, dflats[k], dcov, chisquare=chisquare
            )
            if vals is None:
                vals = np.array([val])
            else:
                vals = np.vstack((vals, np.array([val])))
    if chisquare:
        return vals, chisqs
    else:
        return vals
| [
"cshuger1@gmail.com"
] | cshuger1@gmail.com |
cf85859497e9262ab0792ec4e552abbecf6d8798 | 68b7e05830d2480e848b0d1ff49f455e3c2e3a3c | /manage.py | 70ae5959854c8281d4a31549726dba3ecf87c16d | [] | no_license | Zauberzunge/Umfragen | 24414567ad8dfeb89a5b7267841a08bf6d035625 | 3e57da7e87d2aebc596878800fd4fe8008f38944 | refs/heads/master | 2023-01-28T20:02:19.044334 | 2020-12-07T21:06:47 | 2020-12-07T21:06:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
from django.core.management.commands.runserver import Command as runserver
runserver.default_port = "8002"
def main():
    """Run administrative tasks."""
    # Fall back to this project's settings module when none is set.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangoProject.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Could not import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"="
] | = |
5a5eef8f8e486201700fc8211ccb9747a4e5b131 | 1c8751548b6453102d35f4ca314f8566b428a2ba | /autotable/autotable/settings.py | 3060041834f5358483fd637886eae83f68312741 | [] | no_license | ch-liux/pyweb | fe78f1947cea6135aa331bc4fa4cd7c538b0f461 | cc1b99c3459388fd9276d8c78278abae90b99f78 | refs/heads/master | 2022-02-03T06:43:54.957461 | 2019-05-21T10:07:10 | 2019-05-21T10:07:10 | 173,143,776 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,600 | py |
import os, platform
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_SYS = platform.system()
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and
# load it from the environment before any real deployment.
SECRET_KEY = '*w@^=n9@z978t=l1=do!&ig*2vu-cq(93()ql=r2_#$zi5-lsk'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Linux host is treated as production: debug off, all hosts allowed.
# NOTE(review): ALLOWED_HOSTS = ['*'] accepts any Host header; list the
# real domain(s) instead.
if BASE_SYS == 'Linux':
    DEBUG = False
    ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'apps.user',
'captcha', # 图片验证码
'pure_pagination', # 分页
]
# 自定义user
AUTH_USER_MODEL = 'user.UserProfile'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'autotable.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'autotable.wsgi.application'
# Database: local MySQL.
# NOTE(review): root credentials are hard-coded -- move them to
# environment variables / a secrets file.
DATABASES = {
    'default': {
        'NAME': 'autotable',
        'ENGINE': 'django.db.backends.mysql',
        'USER': 'root',
        'PASSWORD': 'Admin228.',
        'HOST': '127.0.0.1',
        'PORT': 3306
    }
}
# Password validation
AUTH_PASSWORD_VALIDATORS = [
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',},
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',},
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',},
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',},
]
# Internationalization
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
if BASE_SYS == 'Linux':
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
elif BASE_SYS == 'Windows':
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
# django-simple-captcha settings
# Image size in pixels
CAPTCHA_IMAGE_SIZE = (120, 45)
CAPTCHA_NOISE_FUNCTIONS = (
    #'captcha.helpers.noise_null', # no noise styling
    'captcha.helpers.noise_arcs', # line noise
    'captcha.helpers.noise_dots' # dot noise
)
# Render the captcha text as an arithmetic expression, e.g. 2+2=
CAPTCHA_CHALLENGE_FUNCT = 'captcha.helpers.math_challenge'
# Number of characters
# CAPTCHA_LENGTH = 4
# Timeout (minutes)
CAPTCHA_TIMEOUT = 1
# Redirect target for unauthenticated users
LOGIN_URL = '/user/login'
# Password hashing algorithms, tried in listed order (first is used for
# new passwords; the rest allow verifying older hashes).
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
    'django.contrib.auth.hashers.SHA1PasswordHasher',
    'django.contrib.auth.hashers.MD5PasswordHasher',
    'django.contrib.auth.hashers.CryptPasswordHasher',
]
# redis缓存
# CACHES = {
# "default": {
# "BACKEND": "django_redis.cache.RedisCache",
# "LOCATION": "redis://127.0.0.1:6379/1",
# "OPTIONS": {
# "CLIENT_CLASS": "django_redis.client.DefaultClient",
# "CONNECTION_POOL_KWARGS": {"max_connections": 20},
# "PASSWORD": "123456",
# }
# }
# }
# SESSION_ENGINE = "django.contrib.sessions.backends.cache"
# SESSION_CACHE_ALIAS = "default"
# 控制台打印SQL
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console':{
'level':'DEBUG',
'class':'logging.StreamHandler',
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'propagate': True,
'level':'DEBUG',
},
}
}
| [
"343134072@qq.com"
] | 343134072@qq.com |
cfdfb8b0744cfad950f40ca7d6ea763d5963d48b | d69f24293c56b34c5b1508ed112d791a61e0c945 | /04-Milestone Project - 1/04Project_01_Tic_Tac_Toe.py | 60ce13465af299353efcbcf0f9320a08b0758533 | [] | no_license | NajmusShayadat/Complete-Python-3-Bootcamp | 4d83465a73a6efb2e3acd601c578104c5c568e0e | 001ac480490f07fd190806f490747c8e7a9e32b6 | refs/heads/master | 2020-09-12T03:42:12.732715 | 2020-06-28T19:11:42 | 2020-06-28T19:11:42 | 222,292,620 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,474 | py | # Project description:
# Create a Tic Tac Toe game. You are free to use any IDE you like.
#
# Here are the requirements:
#
# 2 players should be able to play the game (both sitting at the same computer)
# The board should be printed out every time a player makes a move
# You should be able to accept input of the player position and then place a symbol on the board
# The work flow:
# Ask input
# Check input (find number)
# Add input
# Show board
# Check win
# Check move (find blank)
##############################
# from IPython.display import clear_output# Global variables # to run in any IDE other than Jupyter notebook
def show_board(v_p, v_g):
    """
    DOCSTRING: Print two boards side by side: one showing the available
    positions, the other showing the game progress!
    :param v_p: board one, position dict (keys 'p1'..'p9')
    :param v_g: board two, game display dict (keys 'g1'..'g9')
    """
    # clear_output() # to run in jupyter notebook
    print('\n' * 100) # to run in any IDE other than Jupyter notebook
    # print the board
    print(" Available\t Game board\n positions\t display!\n")
    print(f" {v_p['p7']} | {v_p['p8']} | {v_p['p9']} \t {v_g['g7']} | {v_g['g8']} | {v_g['g9']}")
    print("-----------\t -----------")
    print(f" {v_p['p4']} | {v_p['p5']} | {v_p['p6']} \t {v_g['g4']} | {v_g['g5']} | {v_g['g6']}")
    print("-----------\t -----------")
    print(f" {v_p['p1']} | {v_p['p2']} | {v_p['p3']} \t {v_g['g1']} | {v_g['g2']} | {v_g['g3']}")
def initiate_game(preset):
    """
    DOCSTRING: a list of 10 elements becomes a list of 9 elements (X and O in sequence according to player 1's choice)
    Player 1 picks a symbol; the matching end of *preset* is dropped so the
    remaining 9 entries alternate starting with player 1's symbol.
    Note: *preset* is mutated in place and also returned.
    :param preset: a list, containing 10 moves ['X', 'O', 'X', 'O', 'X', 'O', 'X', 'O', 'X', 'O']
    :return: a list of 9 moves in sequence
    """
    p1symbol = input("\nPlayer 1, Pick X or O and press enter: ")
    if 'X' in p1symbol.upper():
        # X goes first: drop the trailing 'O' so 'X' occupies the even turns.
        preset.pop(-1)
    elif 'O' in p1symbol.upper():
        # O goes first: drop the leading 'X'.
        preset.pop(0)
    else:
        # Invalid entry: clear the screen and re-prompt recursively until valid.
        # clear_output() # to run in jupyter notebook
        print('\n' * 100) # to run in any IDE other than Jupyter notebook
        print("\n\nWrong input!")
        initiate_game(preset)
    return preset
def use_input(loc, symbols, v_p, v_g):
    """
    DOCSTRING: asks for input position checks the input validity
    and shows the board after modifying the position list and game display list
    :param loc: an integer representing the turn number.
    :param symbols: a list containing 9 elements of X and O in sequence
    :param v_p: board one, position list
    :param v_g: board two, game display list
    """
    # NOTE(review): int() raises ValueError on non-numeric input; only taken or
    # out-of-range positions are re-prompted below -- confirm this is intended.
    inp = int(input(f"Player {(loc % 2 != 0) + 1} ({symbols[loc]}), please enter your next position: "))
    if inp in v_p.values():
        # Valid free position: blank it on the position board and mark the move.
        pKey = 'p' + str(inp)
        gKey = 'g' + str(inp)
        v_p[pKey] = ' '
        v_g[gKey] = symbols[loc]
        show_board(v_p, v_g)
    else:
        # Taken or out-of-range: prompt the same player again (recursively).
        print('\n\nWrong Input! Try again!')
        use_input(loc, symbols, v_p, v_g)
def check_win(vg):
    """
    DOCSTRING: checks all the winning conditions from game board display list
    (three identical non-blank marks on any row, column or diagonal).
    :param vg: game board display dict (keys 'g1'..'g9')
    :return: Boolean
    """
    winning_lines = (
        ('g1', 'g2', 'g3'), ('g4', 'g5', 'g6'), ('g7', 'g8', 'g9'),  # rows
        ('g1', 'g4', 'g7'), ('g2', 'g5', 'g8'), ('g3', 'g6', 'g9'),  # columns
        ('g1', 'g5', 'g9'), ('g3', 'g5', 'g7'),                      # diagonals
    )
    return any(vg[a] == vg[b] == vg[c] != ' ' for a, b, c in winning_lines)
def check_move(vg):
    """
    DOCSTRING: Checks for blank space in game board display list.
    :param vg: game board display dict (keys 'g1'..'g9')
    :return: Boolean -- True while at least one cell is still unmarked
    """
    return any(cell == ' ' for cell in vg.values())
def replay():
    """
    DOCSTRING: takes an input for yes or no and checks only the first letter is lower case y.
    :return: Boolean
    """
    # Any answer starting with 'y' or 'Y' counts as yes (input is lower-cased first).
    return input('Play again? Enter Yes or No: ').lower().startswith('y')
# Top-level game loop: each iteration of the outer while is one full game.
while True:
    # clear_output() # to run in jupyter notebook
    print('\n' * 100) # to run in any IDE other than Jupyter notebook
    print("Welcome to Tic Tac Toe!")
    # Declare variables dictionaries representing two display board
    positionboard = {'p1': 1, 'p2': 2, 'p3': 3, 'p4': 4, 'p5': 5, 'p6': 6, 'p7': 7, 'p8': 8, 'p9': 9}
    gameboard = {'g1': ' ', 'g2': ' ', 'g3': ' ', 'g4': ' ', 'g5': ' ', 'g6': ' ', 'g7': ' ', 'g8': ' ', 'g9': ' '}
    # a set of 10 moves
    moveset = ['X', 'O', 'X', 'O', 'X', 'O', 'X', 'O', 'X', 'O']
    turns = initiate_game(moveset) # all 9 moves in a sequence depending on the initial pick 'X' or 'O'
    show_board(positionboard, gameboard) # Displays 2 boards
    game_on = True
    while game_on:
        for i in range(9): # max 9 moves
            wincheck = check_win(gameboard) # checking if winning condition is met
            if wincheck:
                # The winning mark was placed on the PREVIOUS turn, hence (i - 1).
                # NOTE(review): when the 9th move wins, the for loop restarts with
                # i == 0 and (i - 1) % 2 attributes the win to the wrong player --
                # confirm and fix if needed.
                print(f"\n\n\tCongratulations!\n\tPlayer {1 + int((i - 1) % 2 != 0)} wins!")
                game_on = False
                break
            else:
                movecheck = check_move(gameboard) # checking if moves available
                if movecheck:
                    use_input(i, turns, positionboard, gameboard) # asking for input again
                else:
                    print('\n\n Match draw')
                    game_on = False
                    break
    if not replay(): # ask for replay the game
        break
| [
"najmus.bappy@gmail.com"
] | najmus.bappy@gmail.com |
68e7cf5b54d4085ab60ad38a20316d67bc64bb57 | 245e442a12da265bef4043575ac679ef7f1df800 | /core/strings.py | aa91eee12ae078482e88d4f37a017212828a99a2 | [
"MIT"
] | permissive | Dwarf-Community/Dwarf | b7c38db5f55462c658f3a8cf9ba8be85eaadc150 | 7b23e411198cc1b73c3923325d2cb84a2d3da53b | refs/heads/master | 2021-01-19T00:24:08.193809 | 2018-03-08T21:17:30 | 2018-03-08T21:17:30 | 73,002,090 | 12 | 10 | MIT | 2018-01-18T20:53:55 | 2016-11-06T16:06:07 | Python | UTF-8 | Python | false | false | 843 | py | """Externalized strings for better structure and easier localization"""
# User-facing message templates for extension management; the '{}' slots are
# filled with str.format at the call sites.
failed_to_install = "Failed to install '**{}**'."
failed_to_update = "Failed to update '**{}**'."
specify_extension_name = "Please specify a name for this extension (must be a valid Python package name)."
skipping_this_extension = "Not installing this extension."
unsatisfied_requirements = "Missing required packages:"
unsatisfied_dependencies = "Missing required extensions:"
prompt_install_requirements = "Do you want to install the missing required packages?"
# Adjacent string literals are concatenated by the compiler; the redundant
# str(...) wrappers from the original added nothing and were removed.
would_be_uninstalled_too = ("Extensions that would be uninstalled as well "
                            "as they depend on '**{}**':")
proceed_with_uninstallation = ("Do you want to proceed? This will uninstall "
                               "the above listed extensions. (yes/no)")
| [
"aileenfromthemoon@gmail.com"
] | aileenfromthemoon@gmail.com |
901d73e1e1b9fbab700e456ee163cba1d0d65fe4 | 9fcc6ed9d6ddff6d183a891066f6e2be5c3875e8 | /pandasdmx/source/sgr.py | cb73a9a7c0c1aad9de3316676facf7c3269555fc | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"Python-2.0"
] | permissive | daoluan/pandaSDMX | ea289db699d2516cf563194214d1e70adb61dca7 | 2efcb5a429a5306efd89bed4cd55946d1ad5067b | refs/heads/master | 2020-07-12T21:37:20.617115 | 2019-08-28T11:09:59 | 2019-08-28T11:09:59 | 204,912,582 | 0 | 0 | Apache-2.0 | 2019-08-28T19:08:08 | 2019-08-28T11:08:08 | null | UTF-8 | Python | false | false | 667 | py | from . import Source as BaseSource
class Source(BaseSource):
    """Data-source hooks for SGR (SDMX Global Registry)."""
    _id = 'SGR'

    def handle_response(self, response, content):
        """SGR responses do not specify content-type; set it directly."""
        if response.headers.get('content-type', None) is None:
            response.headers['content-type'] = 'application/xml'
        return response, content

    def modify_request_args(self, kwargs):
        """SGR is a data source but not a data provider.

        Default the ``provider`` argument to ``'all'`` so that data
        republished by SGR from all of its upstream providers is retrieved.
        (The original docstring said ``agency`` was overridden; the code
        below actually sets ``provider``.)
        """
        kwargs.setdefault('provider', 'all')
| [
"mail@paul.kishimoto.name"
] | mail@paul.kishimoto.name |
b1afb31ff5ab29a6dfcf2d2608cafbc1dfd319c0 | 89782dc50bf2e497b1d7b74a85b1d7e1c8c2efad | /Lexer+Parser/TestRunner.py | 809c85471708a0a1eecb1277d9b36839064a4999 | [] | no_license | JiadingGai/looc | 0b1ed83c86c85c398e703d976dafce34b700f895 | b09a2280e2a4fc9286336f38751cc1a65b423a65 | refs/heads/master | 2021-06-22T00:28:29.076528 | 2017-01-18T04:07:34 | 2017-01-18T04:07:34 | 42,084,889 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | #!/usr/bin/python
import os
from os.path import expanduser
# Resolve the project layout relative to the current user's home directory.
HOME = expanduser("~")
DEV_HOME = HOME + "/dev"
PROJ_HOME = DEV_HOME + "/looc"
TESTS_HOME = PROJ_HOME + "/tests"
LPSRC_HOME = PROJ_HOME + "/Lexer+Parser"

# Print the name of every Cool (*.cl) test file found in the tests directory.
for entry in os.listdir(TESTS_HOME):
    if entry.endswith(".cl"):
        print(entry)
| [
"paul.paul.mit@gmail.com"
] | paul.paul.mit@gmail.com |
392e821bc418413b9d4e27fa9d57fc910671a7a0 | 0a574474298f064a662d48493ab6674a78d30e28 | /6.画像処理/pic/kadai6.py | c79d971dced06033333b01fd4c302683ba59379b | [] | no_license | aoki0430/Experiments-Report | ebbef6d1efd3d704a4a83c70213df343d9830957 | 93667b451de48eee41422a7124082ee0ea0469a5 | refs/heads/master | 2020-03-23T00:02:32.615382 | 2019-10-05T14:02:35 | 2019-10-05T14:02:35 | 140,840,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | # encoding: utf8
from __future__ import unicode_literals
from pylab import *
from cv2 import imread
# Load the image as 8-bit grayscale (flag 0 = cv2.IMREAD_GRAYSCALE).
gazo = imread( "kadai6.bmp", 0 )
# Display the image.
imshow(gazo, cmap="gray", vmin=0, vmax=255, interpolation="None")
# Display its intensity histogram (256 bins over [0, 255]).
figure()
hist( gazo.flatten(), 256, (0,255) )
# Contrast-stretch: map the intensity window [120, 150] linearly onto [0, 255].
# NOTE(review): zeros((240,384)) assumes a 384x240 image -- confirm the input
# size.  Also, gazo is presumably uint8 from imread, so gazo[y][x]-120 may
# wrap for pixels below 120 depending on the NumPy version -- confirm the
# intended clipping behaviour.
gazo2 = zeros((240,384))
for x in range(384):
    for y in range(240):
        gazo2[y][x] = (255*(gazo[y][x]-120)/30) + 0
# Display the transformed image.
figure()
imshow(gazo2, cmap="gray", vmin=0, vmax=255, interpolation="None")
# Display its histogram.
figure()
hist( gazo2.flatten(), 256, (0,255) )
show()
| [
"31432520+garuda02@users.noreply.github.com"
] | 31432520+garuda02@users.noreply.github.com |
0619150eff07bbe60a9990b60993647c6c0bdfe2 | 71c5b98a7291ffc291fa1133d176db11abc3cf8e | /pixie/manage.py | f8d43125480be9ede8df6fb225438f290e6c9473 | [] | no_license | turbonemesis/django-dot-pkce-example | 92c6e43c81a54ffad435dd84ee6ecb46b5c81682 | d25f5eefc4911927a2a486601b5d14fe79df7dfe | refs/heads/master | 2023-05-21T23:08:55.365823 | 2020-05-07T07:12:18 | 2020-05-07T07:12:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's administrative command line for the ``pixie`` project."""
    # Point Django at this project's settings unless the caller already chose one.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pixie.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line
    # (runserver, migrate, ...).
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"patrick.retornaz@liip.ch"
] | patrick.retornaz@liip.ch |
a9eca09ed76c928a0375568f37bd3d86dedcb313 | 042bc9c982ac55cd4d20d87202c483f2093b6ece | /q8_chocolate_bar_cooling.py | 0111f187f1f8ba762b6418226d9b45c2774fb7b9 | [] | no_license | eduardo-jh/HW22_Heat_transfer_boundary_conditions | b279c8139afc00c1a455cdaf36252dab090d1c5e | 45efa0f61fa9ed3df9b1ddbef7e1a338a4803d26 | refs/heads/main | 2023-04-07T21:18:49.742464 | 2021-04-14T20:00:32 | 2021-04-14T20:00:32 | 357,329,806 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,996 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
BE523 Biosystems Analysis & Design
HW22 - Question 8. Cooling of a chocolate bar
Prof. Waller solution
Created on Mon Apr 12 13:20:01 2021
@author: eduardo
"""
import numpy as np
import matplotlib.pyplot as plt
# ---- physical parameters and numerical grid (explicit FTCS heat equation) ----
T_air = 20 # ambient temperature in C
L = 0.012 # thickness of solid in m
tmax = 400 # simulation period in sec
nx = 20 # number of space steps
nt = 800 # number of time steps
b_initial = 40 # initial solid temperature
h = 60 # aerodynamic conductivity in W/m/K
k = 0.19 # thermal conductivity for the solid W/m
Cp = 1170 # specific heat of the solid J /kg/ C
roe = 1200 # density of the solid kg /m3
f = 0.25  # NOTE(review): constant source term, applied as f*dt in the update -- confirm units
alpha = k / (Cp * roe) # thermal diffusivity
dt = tmax/nt # time step in sec
dx = L/nx # space step in m
print('dx =', dx, 'dt =', dt)
T = np.zeros((nx+1,nt+1)) # creating the solution matrix
x = np.linspace(0,L,nx+1) # the space vector
t = np.linspace(0,tmax,nt+1) # the time vector
# FTCS coefficients; the explicit scheme is stable only while B >= 0
# (i.e. alpha*dt/dx**2 <= 0.5), which is why B is printed below.
A = (alpha*dt)/(dx**2)
B = 1 - 2*(alpha * dt)/ (dx**2)
print('B=', B)
T[:,0] = b_initial # the initial boundary condition
# Boundary-condition selector: 1 = Dirichlet (fixed 10 C), 2 = Neumann
# (insulated), 3 = Robin (convective exchange with the air).
conditionleft = 3 # type of the left boundary condition
conditionright = 3 # type of the right boundary condition
if conditionleft == 1:
    T[0,1:nt+1] = 10
if conditionright == 1:
    T[nx,1:nt+1] = 10
for n in range (nt):
    # Neumann condition dT/dz = 0
    if conditionleft == 2:
        T[0,n] = T[2,n]  # NOTE(review): mirrors node 2, not node 1 -- confirm intended stencil
    if conditionright == 2:
        T[nx,n] = T[nx-1,n]
    if conditionleft == 3: # I put both forms of the Robbins condition here. It will use the second one.
        # T[0,n] = (dx*h/k*T_air+T[1,n])/(1+dx*h/k) # first order Taylor series for dT/dz
        T[0,n] = (4*T[1,n]-T[2,n]+2*dx*(h/k*T_air))/(3+2*dx*h/k) # second order Taylor series for dT/dz
    if conditionright == 3:
        # T[nx,n] = (4*T[nx-1,n]-T[nx-2,n]+2*dx*(h/k*T_air))/(3+2*dx*h/k) # second order Taylor series for dT/dz
        T[nx,n] = (dx*h/k*T_air+T[nx-1,n])/(1+dx*h/k) # first order Taylor series for dT/dz
    # interior nodes: explicit forward-time / centred-space update
    # (the original comment labelled this "Dirichlet", which it is not)
    for i in range (1,nx):
        T[i,n+1] = A * T[i+1,n] + B * T[i,n] + A * T[i-1,n]+f*dt
# ---- plot temperature over time at one interior layer ----
plotlayers = [10]
plt.figure(0)
for layer in plotlayers:
    plt.plot(t,T[layer,:], label = 'T(t) at specific layer ({0})'.format(layer))
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=2, mode="expand", borderaxespad=0.)
plt.xlabel('Time (sec)')
plt.ylabel('Temperature (°C)')
plt.savefig('q8_chocolate_cooling_time_%dl_%dr.png' % (conditionleft, conditionright), dpi=300, bbox_inches='tight')
plt.show()
# ---- plot the temperature profile through the thickness at chosen times ----
plottime = [50, 100, 200]
plt.figure(1)
# NOTE(review): this loop rebinds `t` (previously the time vector) and uses it
# as a column (time-STEP) index; with dt = 0.5 s, column 50 is 25 s, so the
# "after {0} seconds" labels overstate the time by 2x -- confirm.
for t in plottime:
    plt.plot(x,T[:,t], label = 'temps after {0} seconds'.format(t))
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=2, mode="expand", borderaxespad=0.)
plt.xlabel('Thickness (m)')
plt.ylabel('Temperature (°C)')
plt.savefig('q8_chocolate_cooling_space_%dl_%dr.png' % (conditionleft, conditionright), dpi=300, bbox_inches='tight')
plt.show() | [
"eduardojh@email.arizona.edu"
] | eduardojh@email.arizona.edu |
d70865b359cb8d35ac9b9725e57b5da2f4d29582 | 41814be600f6d85da10a9bc04d422cae74beb0b7 | /code/convertUnits.py | adcbb6054ffac966dfb319a96e83b00d727a11d9 | [] | no_license | alexandru-cohal/Thesis-CBRSolPersonalizationCookingRecipes | eb8ba5e8a51ac9777408b34c6640532cfb4d6bf1 | 8d55cbed90df842503725198ebda8651a6862975 | refs/heads/master | 2023-02-14T03:46:36.593290 | 2021-01-11T22:23:35 | 2021-01-11T22:23:35 | 328,802,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,853 | py | unitsList = []
unitsGraph = []
def createGraphUnits():
    """Build the unit-conversion graph from 'units.txt'.

    Each line of the file must look like ``<qty1> <unit1> <qty2> <unit2>``
    (e.g. ``1 cup 16 tbsp``); both directions of the conversion are stored.
    Populates the module-level ``unitsList`` (node names) and ``unitsGraph``
    (per-node adjacency lists of ``(unit, factor)`` tuples).
    """
    global unitsList, unitsGraph

    print("\n ----- Creating the graph of the cooking measurement units ... ----- \n")

    # BUGFIX: the file handle was previously opened and never closed; the
    # context manager guarantees it is released even if parsing fails.
    with open('units.txt', 'r') as unitsFile:
        unitsFileLines = unitsFile.read().splitlines()

    for line in unitsFileLines:
        conversion = line.split(' ')

        quantity1 = float(conversion[0])
        unit1 = conversion[1]
        if unit1 not in unitsList:
            unitsList.append(unit1)
            unitsGraph.append([])

        quantity2 = float(conversion[2])
        unit2 = conversion[3]
        if unit2 not in unitsList:
            unitsList.append(unit2)
            unitsGraph.append([])

        # Store the conversion in both directions with reciprocal factors.
        unitsGraph[unitsList.index(unit1)].append((unit2, quantity2 / quantity1))
        unitsGraph[unitsList.index(unit2)].append((unit1, quantity1 / quantity2))

    print("\n ----- Creating the graph of the cooking measurement units DONE ----- \n")
def convertQuantity(quantity1, units1, units2):
    """Convert *quantity1* expressed in *units1* into *units2*.

    Performs a breadth-first search over the module-level conversion graph
    (``unitsList`` / ``unitsGraph``), multiplying the conversion factors
    along the path.  Returns -1 when no conversion path exists.
    """
    visited = [0] * len(unitsList)
    pending = [(units1, quantity1)]
    if units1 in unitsList:
        visited[unitsList.index(units1)] = 1

    while pending:
        unit, amount = pending.pop(0)
        if unit == units2:
            return amount
        if unit in unitsList:
            for neighbour, factor in unitsGraph[unitsList.index(unit)]:
                idx = unitsList.index(neighbour)
                if visited[idx] == 0:
                    pending.append((neighbour, amount * factor))
                    visited[idx] = 1

    return -1
def main():
    # Build the conversion graph; lookups are then served by convertQuantity().
    createGraphUnits()


if __name__ == '__main__':
    main()
| [
"noreply@github.com"
] | noreply@github.com |
4b1d4915c4cda5fd3764804982f5f71cb271c593 | f694182e6b7d1281cacdbe716fcbce0d7419c2b3 | /system_communication_home/admin.py | d2978bcc5757836298bce803f4ef512ae2dd9158 | [] | no_license | Afollower/MiniProjectCommunication | 15cf07926095c7e39a4f0341832ae2a813619d83 | bdc5f8edc3637f1e80d1706de39281879290819b | refs/heads/master | 2022-09-20T08:14:43.971772 | 2020-06-04T09:09:46 | 2020-06-04T09:09:46 | 263,805,545 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | from django.contrib import admin
from .models import MPC_problem_sch, MPC_Problem_communication_sch
# Register your models here.
class problemAdmin(admin.ModelAdmin):
    """Admin changelist configuration for MPC_problem_sch."""
    # Fields searchable via the admin search box.
    search_fields = ('pp_id', 'pp_name', 'project_id')
    # Columns shown in the changelist table.
    # NOTE(review): search uses 'pp_name' while the list shows 'pp_title' --
    # confirm both fields exist on the model.
    list_display = ('project_id', 'pp_id', 'pp_title')


class ppcAdmin(admin.ModelAdmin):
    """Admin changelist configuration for MPC_Problem_communication_sch."""
    search_fields = ('project_id', 'pp_id', 'pp_com_id')
    list_display = ('project_id', 'pp_id', 'pp_com_id', 'ppc_describe')


# Register both models with their admin configurations.
admin.site.register(MPC_problem_sch, problemAdmin)
admin.site.register(MPC_Problem_communication_sch, ppcAdmin)
| [
"rencheng_310813@qq.com"
] | rencheng_310813@qq.com |
dfd5f1ab44402a21ebfea238e5b70d78f4c08847 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-mrsp.0/mrsp_ut=3.5_rd=0.8_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=79/params.py | bbe63e8c0e1fb04e03a09bb6ca4fadafda488f37 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | {'cpus': 4,
'duration': 30,
'final_util': '3.530310',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.8',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'RUN',
'trial': 79,
'utils': 'uni-medium-3'}
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
7268d1341d3d7a853c011880de49a560eec8eace | 401aae1a063e98e2c98ba366e1a17f36f2bedb5c | /vbpp/tf_utils.py | b5b84eb09cb5213ff5d465d1a32e98155c6786e6 | [
"Apache-2.0"
] | permissive | zcmail/vbpp | 66df32f2d6268a16e8033c7a7b6871ffa9040296 | 00668f3b84b62a9ecf1f580630e8bb59df38ba87 | refs/heads/master | 2022-04-20T01:44:59.946539 | 2020-01-08T16:51:02 | 2020-01-08T16:51:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | # Copyright (C) PROWLER.io 2017
#
# Licensed under the Apache License, Version 2.0
"""
Prototype Code! This code may not be fully tested, or in other ways fit-for-purpose.
Use at your own risk!
"""
import tensorflow as tf
def tf_squeeze_1d(A):
    """Reshape tensor *A* into a flat rank-1 tensor."""
    return tf.reshape(A, (-1,)) # TODO should check that it's got the same length as before
def tf_len(A):
    """Return the size of *A*'s leading dimension as a dynamic scalar tensor."""
    return tf.shape(A)[0]
def tf_vec_dot(v1, v2):
    """
    Calculate the dot product between v1 and v2, regardless of shapes, as long
    as there is at most one dimension with a length > 1 in each vector.
    """
    # turn into flat vectors:
    v1 = tf.squeeze(v1)
    v2 = tf.squeeze(v2)
    #XXX assert v1.ndims == 1
    #XXX assert v2.ndims == 1
    # elementwise product summed over the single remaining axis
    return tf.reduce_sum(tf.multiply(v1, v2))
def tf_vec_mat_vec_mul(v1, M, v2):
    """
    Calculate the bilinear form v1^T M v2, where
    v1 and v2 are vectors of length N and M is a N x N matrix.
    """
    #XXX assert tf.squeeze(v1).ndims == 1
    #XXX assert tf.squeeze(v2).ndims == 1
    v2 = tf.reshape(v2, [-1, 1]) # turn into column vector
    M_dot_v2 = tf.matmul(M, v2)
    # tf_vec_dot squeezes the (N, 1) product back to rank 1 before the dot.
    return tf_vec_dot(v1, M_dot_v2)
| [
"noreply@github.com"
] | noreply@github.com |
62c317d22dbc558e16b7266be9ed1fa5ed9c3425 | b63ffc8eee170387fdb6bf41dde790c24338e80d | /Window_Statistical_Analysis.py | a089f5f349e73233617eb8016e2f98a29ebbd85f | [] | no_license | Taha248/Emotion-Recognition-System | ba143b0b050089dccc08229546f13b85c48ef8d0 | 988b7715bf1a6a6743759fbadfb1e0e312173883 | refs/heads/master | 2020-06-16T20:47:15.260452 | 2019-12-17T08:39:49 | 2019-12-17T08:39:49 | 195,698,964 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,533 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 30 14:02:09 2019
@author: Taha.Tauqir
"""
from statistical_analysis import statistical_analysis
import pandas as pd
import os
INPUT_PATH = "C:\\Users\\taha.tauqir\\Desktop\\output\\"
SAMPLE_ID = "SAMPLE0001"
class Window_Statistical_Analysis():
    """Computes a fixed set of statistical features over one window of
    Temperature / GSR / BPM sensor samples."""

    # statistical_analysis methods applied to each signal, in output order.
    _STAT_METHODS = ('getMean', 'getAAV', 'getAAD', 'getVariance',
                     'getEnergy', 'getMeanCrossingRate', 'getRootMeanSquare',
                     'getSkewness', 'getKurtosis', 'getZeroCrossingRate')

    def __init__(self):
        pass

    def getCalculatedAttributes(self, df):
        """Return the 30 window features (10 statistics for each of the
        'Temperature', 'GSR' and 'BPM' columns of *df*) as one flat list.

        :param df: pandas DataFrame with 'Temperature', 'GSR', 'BPM' columns.
        """
        print(df)  # kept from the original implementation for debugging
        stats = statistical_analysis()

        def _features(values):
            # Apply every statistic to one signal, preserving output order.
            return [getattr(stats, name)(values) for name in self._STAT_METHODS]

        # BUGFIX: the original computed the final BPM feature (zero-crossing
        # rate) on the Temperature samples instead of BPM -- an apparent
        # copy/paste error in the 30 hand-written append calls.  Each signal
        # now uses its own samples for every statistic.
        attributes = []
        for column in ('Temperature', 'GSR', 'BPM'):
            attributes += _features(df[column].values)
        return attributes
| [
"Taha24888@gmail.com"
] | Taha24888@gmail.com |
48ea3aceba383a0062591d5722cc9cb8cc216f2a | 5b5867437d3f2ed4db9d8d18a90b3c8ad54e03c7 | /Shifts/database.py | 989406afd5b5e263a1b228fbe1a11fe4945193c5 | [] | no_license | amitbb93/AI-Scheduler | c9ff5d340d6e7343e0731dab4d8725ac8397657e | 8d3734d8850465ebe204404fb9cf01d13ee4a8ad | refs/heads/main | 2023-07-13T00:35:52.311869 | 2021-08-25T22:40:46 | 2021-08-25T22:40:46 | 316,464,719 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | import pyrebase
# Firebase project configuration for the AI-Scheduler realtime database.
# NOTE(review): these credentials are hard-coded in source control; consider
# loading them from environment variables or a secrets store instead.
config = {
    'apiKey': "AIzaSyDaQDp5xovMLcc7MlIrGzCAVn8K5byshwI",
    'authDomain': "ai-scheduler-13579.firebaseapp.com",
    'databaseURL': "https://ai-scheduler-13579-default-rtdb.europe-west1.firebasedatabase.app/",
    'projectId': "ai-scheduler-13579",
    'storageBucket': "ai-scheduler-13579.appspot.com",
    'messagingSenderId': "798165661754",
    'appId': "1:798165661754:web:3acd9cb6348eee3230dd03",
    'measurementId': "G-5T4RB3DZV4"
}

# Initialise the pyrebase app once at import time and expose shared handles.
firebase=pyrebase.initialize_app(config)
authe = firebase.auth()
database=firebase.database() | [
"amitbb93@gmail.com"
] | amitbb93@gmail.com |
646600322f93ff2c0453d17bf7823470b5dc6892 | 14421a12c4e80395567e676394d369fd9619bd32 | /Scripts/PythonMidLvl/84/84test.py | 06a05233ce41b852f56020436f8510d38948fc20 | [] | no_license | jawor92/Python-Udemy-Mobilo | 7b331e8197233c3116e43e0b3c1110b9b878762e | 8098508835121a1536c2753bc4eedbf17163c93d | refs/heads/master | 2020-12-09T21:39:09.366604 | 2020-01-12T19:31:09 | 2020-01-12T19:31:09 | 233,423,486 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 6 22:09:49 2020
@author: Mateusz.Jaworski
"""
class MailToSantaClaus:
    """A letter to Santa carrying a snapshot of the wished-for presents."""

    def __init__(self, presents):
        # Shallow copy keeps this object independent of the caller's list.
        self.presents = presents[:]

    def show_presents(self):
        """Display the presents stored in this letter."""
        print(self.presents)
mail = MailToSantaClaus(['Teddy Bear', 'Teddy Bear House'])
mail.show_presents() | [
"jaworski92@gmail.com"
] | jaworski92@gmail.com |
f99f4de448083fea0bcdb1d2128c8fd94c735caf | 8aa5ecbedf26847cde34c649f137f24bd997401a | /think_python/think_08.py | f9b78c7d25d49214838f389e9a1c1f32f3d0e216 | [] | no_license | fisherra/python_book_exercises | 40650f50cfc44d2475ec826bebab7eed352587c2 | 6f9210c1043ddc225b2e9354a8e990b55da5792c | refs/heads/master | 2020-03-31T03:50:53.056696 | 2018-12-13T02:08:29 | 2018-12-13T02:08:29 | 151,880,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,895 | py | # chapter 8 - strings
# strings are a sequence of characters
# Indexing demo: grab the first character, then print the whole string.
string = 'yes, this is dog'
letter = string[0]
print(string)
print('-' * 10 )
def letter_by_line(string):
    """Print every character of *string* on its own line, in order."""
    for character in string:
        print(character)
letter_by_line('yes, this is dog')
print('-' * 10)
# traversal - running through a string, chr by chr
# printing slices
s = 'Monty Python'
print(s[0:5])
print(s[6:12])
# but you can just omit the first or second half
print(s[:5])
print(s[6:])
print('-' * 10)
# strings are immutable, you can't change an existing string
greetings = "Hello World!"
# greetings[0] = "j" <--- this won't fly
# but this will
# (slicing builds a brand-new string; the original is untouched)
new_greetings = "J" + greetings[1:]
print(greetings)
print(new_greetings)
print('-' * 10)
# searching
def find(word, letter):
    """Return the index of the first occurrence of *letter* in *word*,
    or -1 when the letter is absent."""
    for position, current in enumerate(word):
        if current == letter:
            return position
    return -1
miss_index = find("mississippi", "p")  # first 'p' is at index 8
print(miss_index)
print('-' * 10)
# counters
# count the i's in mississippi
def i_count(word, letter='i'):
    """Count how many times *letter* occurs in *word*.

    Generalized from the original (which always counted 'i'); the default
    argument keeps every existing call site working unchanged.
    """
    # str.count is the idiomatic, C-speed equivalent of the manual index loop
    # (identical results for single-character needles).
    return word.count(letter)
print(i_count('mississippi'))  # 4
print('-' * 10)
# method
word = 'banana'
print(word.upper())
# methods are like functions but with different notation.
# target.method(argument)
print(word.isdecimal())
print(word.find('na'))
print('-' * 10)
# the in op
print('n' in 'banana')
print('seed' in 'banana')
# simple!
print('-' * 10)
# comparisons on strings work alphabetically
if 'apple' > 'banana':
    print('apple greater than banana')
if 'apple' < 'banana':
    print('apple less than banana')
if 'banana' == 'banana':
    print('banana is banana')
# 'lesser' words are earlier in the alphabet. a = 1.
| [
"fisherankney@gmail.com"
] | fisherankney@gmail.com |
6c2bda0345755e152e1819fa282be7e05a97e988 | 15e85b4d9527e7a87aded5b3c99ad9c785bca915 | /data-storage-manager-sdk/python/simcore_dsm_sdk/configuration.py | 422f971c74adb284286a59de28d37d9be9f11594 | [
"MIT"
] | permissive | mguidon/aiohttp-dsm | 4161f9977d3dffbb727aa26cce4e9fb347aa4e21 | 612e4c7f6f73df7d6752269965c428fda0276191 | refs/heads/master | 2020-03-30T09:03:49.791406 | 2018-10-02T07:05:35 | 2018-10-02T07:05:35 | 151,058,427 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,125 | py | # coding: utf-8
"""
dsm-api
dsm api # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
    """Metaclass giving each class a shared, mutable 'default instance'.

    Calling the class returns a shallow copy of that default, so mutating a
    returned object does not leak back unless set_default is used explicitly.
    """
    def __init__(cls, name, bases, dct):
        super(TypeWithDefault, cls).__init__(name, bases, dct)
        cls._default = None

    def __call__(cls):
        # First call materializes the default; later calls hand out copies.
        if cls._default is None:
            cls._default = type.__call__(cls)
        return copy.copy(cls._default)

    def set_default(cls, default):
        # Replace the shared default with a copy of the supplied instance.
        cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self):
        """Constructor"""
        # Default Base url
        self.host = "http://{host}:{port}/{version}"
        # Temp file folder for downloading files
        self.temp_folder_path = None

        # Authentication Settings
        # dict to store API key(s)
        self.api_key = {}
        # dict to store API prefix (e.g. Bearer)
        self.api_key_prefix = {}
        # Username for HTTP basic authentication
        self.username = ""
        # Password for HTTP basic authentication
        self.password = ""

        # Logging Settings
        self.logger = {}
        self.logger["package_logger"] = logging.getLogger("simcore_dsm_sdk")
        self.logger["urllib3_logger"] = logging.getLogger("urllib3")
        # NOTE: the three assignments below route through the @property
        # setters defined later; logger_format must be assigned before
        # logger_file so that self.logger_formatter already exists.
        # Log format
        self.logger_format = '%(asctime)s %(levelname)s %(message)s'
        # Log stream handler
        self.logger_stream_handler = None
        # Log file handler
        self.logger_file_handler = None
        # Debug file location
        self.logger_file = None
        # Debug switch
        self.debug = False

        # SSL/TLS verification
        # Set this to false to skip verifying SSL certificate when calling API
        # from https server.
        self.verify_ssl = True
        # Set this to customize the certificate file to verify the peer.
        self.ssl_ca_cert = None
        # client certificate file
        self.cert_file = None
        # client key file
        self.key_file = None
        # Set this to True/False to enable/disable SSL hostname verification.
        self.assert_hostname = None

        # urllib3 connection pool's maximum number of connections saved
        # per pool. urllib3 uses 1 connection as default value, but this is
        # not the best value when you are making a lot of possibly parallel
        # requests to the same host, which is often the case here.
        # cpu_count * 5 is used as default value to increase performance.
        self.connection_pool_maxsize = multiprocessing.cpu_count() * 5

        # Proxy URL
        self.proxy = None
        # Safe chars for path_param
        self.safe_chars_for_path_param = ''

    @property
    def logger_file(self):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        # self.__logger_file is name-mangled to _Configuration__logger_file.
        return self.__logger_file

    @logger_file.setter
    def logger_file(self, value):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        self.__logger_file = value
        if self.__logger_file:
            # If set logging file,
            # then add file handler and remove stream handler.
            self.logger_file_handler = logging.FileHandler(self.__logger_file)
            self.logger_file_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_file_handler)

    @property
    def debug(self):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        return self.__debug

    @debug.setter
    def debug(self, value):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.DEBUG)
            # turn on httplib debug
            httplib.HTTPConnection.debuglevel = 1
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.WARNING)
            # turn off httplib debug
            httplib.HTTPConnection.debuglevel = 0

    @property
    def logger_format(self):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        return self.__logger_format

    @logger_format.setter
    def logger_format(self, value):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        self.__logger_format = value
        self.logger_formatter = logging.Formatter(self.__logger_format)

    def get_api_key_with_prefix(self, identifier):
        """Gets API key (with prefix if set).

        :param identifier: The identifier of apiKey.
        :return: The token for api key authentication.
        """
        # Returns None implicitly when no key is configured for `identifier`.
        if (self.api_key.get(identifier) and
                self.api_key_prefix.get(identifier)):
            return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier]  # noqa: E501
        elif self.api_key.get(identifier):
            return self.api_key[identifier]

    def get_basic_auth_token(self):
        """Gets HTTP basic authentication header (string).

        :return: The token for basic HTTP authentication.
        """
        return urllib3.util.make_headers(
            basic_auth=self.username + ':' + self.password
        ).get('authorization')

    def auth_settings(self):
        """Gets Auth Settings dict for api client.

        :return: The Auth Settings information dict.
        """
        # This API declares no authentication schemes, hence the empty dict.
        return {
        }

    def to_debug_report(self):
        """Gets the essential information for debugging.

        :return: The report for debugging.
        """
        return "Python SDK Debug Report:\n"\
               "OS: {env}\n"\
               "Python Version: {pyversion}\n"\
               "Version of the API: 2.0.0\n"\
               "SDK Package Version: 1.0.0".\
               format(env=sys.platform, pyversion=sys.version)
| [
"guidon@itis.ethz.ch"
] | guidon@itis.ethz.ch |
fee008cee099325fe301508912ca78df158a3a07 | 1e8dc50cc12c7ecb03b996e514bb5336d93d62d1 | /cirq/protocols/qid_shape_protocol.py | 870a5042b3ca9fd2577c3162c09a200990c25f1b | [
"Apache-2.0"
] | permissive | rajeshkumarkarra/Cirq | 0a6a4a55bb176b8fdbef836b41b200b1d1711c40 | 5ad06cc7a487ca94436715a3c51b6a50dfd10513 | refs/heads/master | 2020-07-04T01:51:21.883521 | 2019-08-13T02:16:02 | 2019-08-13T02:16:02 | 202,114,207 | 1 | 0 | Apache-2.0 | 2019-08-13T09:49:17 | 2019-08-13T09:49:16 | null | UTF-8 | Python | false | false | 7,441 | py | # Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING, Any, Tuple, TypeVar, Union
from typing_extensions import Protocol
from cirq.type_workarounds import NotImplementedType
if TYPE_CHECKING:
import cirq
# This is a special indicator value used by the methods to determine whether or
# not the caller provided a 'default' argument. It must be of type
# Tuple[int, ...] to ensure the method has the correct type signature in that
# case. It is checked for using `is`, so it won't have a false positive if the
# user provides a different (0,) value.
RaiseTypeErrorIfNotProvided = (0,) # type: Any
# Equal integers outside the range [-5, 256] aren't identically equal with `is`.
RaiseTypeErrorIfNotProvidedInt = -2**512 # type: Any
TDefault = TypeVar('TDefault')
class SupportsExplicitQidShape(Protocol):
"""A unitary, channel, mixture or other object that operates on a known
number qubits/qudits/qids, each with a specific number of quantum levels."""
def _qid_shape_(self) -> Union[Tuple[int, ...], NotImplementedType]:
"""A tuple specifying the number of quantum levels of each qid this
object operates on, e.g. (2, 2, 2) for a three-qubit gate.
This method is used by the global `cirq.qid_shape` method (and by
`cirq.num_qubits` if `_num_qubits_` is not defined). If this
method is not present, or returns NotImplemented, it is assumed that the
receiving object operates on qubits. (The ability to return
NotImplemented is useful when a class cannot know if it has a shape
until runtime.)
The order of values in the tuple is always implicit with respect to the
object being called. For example, for gates the tuple must be ordered
with respect to the list of qubits that the gate is applied to. For
operations, the tuple is ordered to match the list returned by its
`qubits` attribute.
Returns:
A unitary matrix describing this value, or NotImplemented if the
shape is unknown.
"""
class SupportsExplicitNumQubits(Protocol):
"""A unitary, channel, mixture or other object that operates on a known
number of qubits."""
def _num_qubits_(self) -> Union[int, NotImplementedType]:
"""The number of qubits, qudits, or qids this object operates on.
This method is used by the global `cirq.num_qubits` method (and by
`cirq.qid_shape` if `_qid_shape_` is not defined. If this
method is not present, or returns NotImplemented, it will fallback
to using the length of `_qid_shape_`.
Returns:
An integer specifying the number of qubits, qudits or qids.
"""
def qid_shape(val: Any, default: TDefault = RaiseTypeErrorIfNotProvided
) -> Union[Tuple[int, ...], TDefault]:
"""Returns a tuple describing the number of quantum levels of each
qubit/qudit/qid `val` operates on.
Args:
val: The value to get the shape of.
default: Determines the fallback behavior when `val` doesn't have
a shape. If `default` is not set, a TypeError is raised. If
default is set to a value, that value is returned.
Returns:
If `val` has a `_qid_shape_` method and its result is not
NotImplemented, that result is returned. Otherwise, if `val` has a
`_num_qubits_` method, the shape with `num_qubits` qubits is returned
e.g. `(2,)*num_qubits`. If neither method returns a value other than
NotImplemented and a default value was specified, the default value is
returned.
Raises:
TypeError: `val` doesn't have either a `_qid_shape_` or a `_num_qubits_`
method (or they returned NotImplemented) and also no default value
was specified.
"""
getter = getattr(val, '_qid_shape_', None)
result = NotImplemented if getter is None else getter()
if result is not NotImplemented:
return result
# Fallback to _num_qubits_
num_getter = getattr(val, '_num_qubits_', None)
num_qubits = NotImplemented if num_getter is None else num_getter()
if num_qubits is not NotImplemented:
return (2,) * num_qubits
if default is not RaiseTypeErrorIfNotProvided:
return default
if getter is not None:
raise TypeError("object of type '{}' does have a _qid_shape_ method, "
"but it returned NotImplemented.".format(type(val)))
if num_getter is not None:
raise TypeError("object of type '{}' does have a _num_qubits_ method, "
"but it returned NotImplemented.".format(type(val)))
raise TypeError("object of type '{}' has no _num_qubits_ or _qid_shape_ "
"methods.".format(type(val)))
def num_qubits(val: Any, default: TDefault = RaiseTypeErrorIfNotProvidedInt
) -> Union[int, TDefault]:
"""Returns the number of qubits, qudits, or qids `val` operates on.
Args:
val: The value to get the number of qubits from.
default: Determines the fallback behavior when `val` doesn't have
a number of qubits. If `default` is not set, a TypeError is raised.
If default is set to a value, that value is returned.
Returns:
If `val` has a `_num_qubits_` method and its result is not
NotImplemented, that result is returned. Otherwise, if `val` has a
`_qid_shape_` method, the number of qubits is computed from the length
of the shape and returned e.g. `len(shape)`. If neither method returns a
value other than NotImplemented and a default value was specified, the
default value is returned.
Raises:
TypeError: `val` doesn't have either a `_num_qubits_` or a `_qid_shape_`
method (or they returned NotImplemented) and also no default value
was specified.
"""
num_getter = getattr(val, '_num_qubits_', None)
num_qubits = NotImplemented if num_getter is None else num_getter()
if num_qubits is not NotImplemented:
return num_qubits
# Fallback to _qid_shape_
getter = getattr(val, '_qid_shape_', None)
shape = NotImplemented if getter is None else getter()
if shape is not NotImplemented:
return len(shape)
if default is not RaiseTypeErrorIfNotProvidedInt:
return default
if num_getter is not None:
raise TypeError("object of type '{}' does have a _num_qubits_ method, "
"but it returned NotImplemented.".format(type(val)))
if getter is not None:
raise TypeError("object of type '{}' does have a _qid_shape_ method, "
"but it returned NotImplemented.".format(type(val)))
raise TypeError("object of type '{}' has no _num_qubits_ or _qid_shape_ "
"methods.".format(type(val)))
| [
"noreply@github.com"
] | noreply@github.com |
07d07ebf07f2f9bfa38da809ac2fdadf8a27cad5 | 6759fc67268b107259fed45032510ce4436d1f24 | /flaskblog/models.py | edc4ae01a4738a1e9c5d78e337e38877470d881b | [] | no_license | sanpj2292/Flask-Blog | bf5444c9f9832158e1ce6cb51c6b5e17eb8132c4 | 8a5b00e86f4764e96d5a8d07e3da5a6db21a2b91 | refs/heads/master | 2020-05-30T12:04:06.919145 | 2019-06-06T14:46:22 | 2019-06-06T14:46:22 | 189,722,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,148 | py | from datetime import datetime
from flaskblog import db, login_manager
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(20), unique=True, nullable=False)
email = db.Column(db.String(150), unique=True, nullable=False)
image_file = db.Column(db.String(20), nullable=False, default='default.jpg')
password = db.Column(db.String(60), nullable=False)
posts = db.relationship('Post', backref='author', lazy=True)
def __repr__(self):
return 'User({},{},{})'.format(self.username,self.email,self.image_file)
class Post(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(300), nullable=False)
date_poster = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
content = db.Column(db.Text, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
return 'Post({},{})'.format(self.title,self.date_poster) | [
"sankeerth2292@gmail.com"
] | sankeerth2292@gmail.com |
88c860d04a3b88ad9629fcde8f94e6e5c15def5c | fe105e3ed5867c63e57aa9b595fd83450513682e | /examples/line_wiki.py | 1004388f6c0a85bd5f52a62690d60ed7cdeab51a | [
"MIT"
] | permissive | zihaohe123/graph_embedding | 1fb2bb11f29b4f7eda69a97085744c47abb7d36e | 2a6f8214ce4b30b51eb9f1904b64fe782876f010 | refs/heads/master | 2022-05-27T16:35:51.106754 | 2020-03-22T20:48:08 | 2020-03-22T20:48:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,525 | py | import sys
sys.path.append('..')
import numpy as np
from ge.classify import read_node_label, Classifier
from ge import LINE
from sklearn.linear_model import LogisticRegression
import matplotlib.pyplot as plt
import networkx as nx
from sklearn.manifold import TSNE
def evaluate_embeddings(embeddings):
X, Y = read_node_label('../data/wiki/wiki_labels.txt')
tr_frac = 0.8
print("Training classifier using {:.2f}% nodes...".format(
tr_frac * 100))
clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
clf.split_train_evaluate(X, Y, tr_frac)
def plot_embeddings(embeddings,):
X, Y = read_node_label('../data/wiki/wiki_labels.txt')
emb_list = []
for k in X:
emb_list.append(embeddings[k])
emb_list = np.array(emb_list)
model = TSNE(n_components=2)
node_pos = model.fit_transform(emb_list)
color_idx = {}
for i in range(len(X)):
color_idx.setdefault(Y[i][0], [])
color_idx[Y[i][0]].append(i)
for c, idx in color_idx.items():
plt.scatter(node_pos[idx, 0], node_pos[idx, 1], label=c)
plt.legend()
plt.show()
if __name__ == "__main__":
G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt', create_using=nx.DiGraph(),
nodetype=None, data=[('weight', int)])
model = LINE(G, embedding_size=128, order='all')
model.train(batch_size=1024, epochs=50, verbose=2)
embeddings = model.get_embeddings()
evaluate_embeddings(embeddings)
plot_embeddings(embeddings)
| [
"zihaohe@HEZIHAOs-MacBook-Pro.local"
] | zihaohe@HEZIHAOs-MacBook-Pro.local |
9dc2f41179046af30e66587a58463d5f16f82133 | a74cbda970626fc69ee362b48b16fb6ae69517f2 | /Exercices/Exo/dico.py | 3b1d44b9b2e1278cbea12a751abd341bb244d8ff | [] | no_license | Halimeda/Python_Pratice | bbe059e018c15fa3c1c8109a8c95ada87cdedc65 | c7411dbf41e7d6db8a3dea14bc17884749c488dd | refs/heads/master | 2020-05-28T10:30:43.000378 | 2019-10-13T11:59:11 | 2019-10-13T11:59:11 | 188,970,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | dico = {}
dico ['name'] = input("Choisi un nom : ")
dico ['power'] = int(input("Détermine ton niveau de pouvoir : "))
print(dico['name'])
print(str(dico['power'])) | [
"h.decock92@gmail.com"
] | h.decock92@gmail.com |
225202c04f0411bbe5f71a70247fc99044c6179c | 76bd865b90dc83107d93e376f863c61807786613 | /Esercizio-1/reducer2.py | 2bf1f5107dc45ac2fac24c9af6392b1c9f2a0561 | [] | no_license | mdelia17/bigdata-historical-stocks | 196993ebd7c703a4251416c10767358c1e0a0172 | d78fbb0c3ef5491c7a432fb46fc7de00c586e5d0 | refs/heads/main | 2023-05-06T15:35:02.414565 | 2021-05-22T15:37:30 | 2021-05-22T15:37:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,075 | py | #!/usr/bin/env python3
"""reducer.py"""
import sys
from datetime import datetime
TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S"
def percentuale(a, b):
return float("{:.5f}".format(float((b-a)/a*100)))
ticker_2_set = {}
for line in sys.stdin:
line = line.strip()
ticker, open, close, low, high, date = line.split("\t")
try:
open = float("{:.5f}".format(float(open)))
close = float("{:.5f}".format(float(close)))
low = float("{:.5f}".format(float(low)))
high = float("{:.5f}".format(float(high)))
date = datetime.strptime(date, TIMESTAMP_FORMAT)
except ValueError:
continue
if ticker not in ticker_2_set:
ticker_2_set[ticker] = [date,close,date,close,low,high,date,0,0,0]
if (open < close):
ticker_2_set[ticker][7] += 1
ticker_2_set[ticker][9] = date.year
ticker_2_set[ticker][8] = 1
else:
if (date < ticker_2_set[ticker][0]):
ticker_2_set[ticker][0] = date
ticker_2_set[ticker][1] = close
if (date > ticker_2_set[ticker][2]):
ticker_2_set[ticker][2] = date
ticker_2_set[ticker][3] = close
if (low < ticker_2_set[ticker][4]):
ticker_2_set[ticker][4] = low
if (high > ticker_2_set[ticker][5]):
ticker_2_set[ticker][5] = high
if (open < close):
if (date-ticker_2_set[ticker][6]).days == 1:
ticker_2_set[ticker][7] += 1
else:
ticker_2_set[ticker][7] = 1
if ticker_2_set[ticker][7] >= ticker_2_set[ticker][8]:
ticker_2_set[ticker][8] = ticker_2_set[ticker][7]
ticker_2_set[ticker][9] = date.year
else:
ticker_2_set[ticker][7] = 0
ticker_2_set[ticker][6] = date
sorted = sorted(ticker_2_set.items(), key=lambda x: x[1][2], reverse=True)
for t in sorted:
print("%s\t%s\t%s\t%f\t%f\t%f\t%i\t%s" % (t[0], t[1][0], t[1][2], percentuale(t[1][1],t[1][3]), t[1][5], t[1][4], t[1][8], t[1][9])) | [
"gianluca@192.168.1.47"
] | gianluca@192.168.1.47 |
150f2cb86820cff4acc1de49013f8a9a3dcaae95 | d81c593d93b3496f42bd7c425fa793760797ef3b | /leet-problems/344-reverse-string.py | ce40b107d35eae1ac3242cc35a5bec47e29931c5 | [] | no_license | diegoaleman/ciu-old | 335f79d2b704bb919dcbd5988912f037682f99f1 | 0a8e00dc0dc885561cf9fa7e58a1a2f847e4ffdc | refs/heads/master | 2020-04-04T21:00:16.763124 | 2019-04-08T03:11:42 | 2019-04-08T03:11:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | class Solution:
def reverseString(self, s):
"""
:type s: str
:rtype: str
"""
middle = len(s) // 2
length = len(s) - 1
l_s = list(s)
for x in range(middle):
l_s[x], l_s[length - x] = l_s[length - x], l_s[x]
s = ''.join(l_s)
return s
#
# super easy solution using python slices
# return s[::-1]
#
| [
"diegoalemtz@gmail.com"
] | diegoalemtz@gmail.com |
9bf89c7da6c1192e8c8268e53934744bdeeae1ae | 4b6e6435873402a9d6ba27e243d290559a91d05b | /RiggingTool/Modules/Animation/circleControlStretchyIK.py | 89a8706804707d49c2317658038cc78583aad41b | [] | no_license | pouyaz123/Python-character-pipeline | 203f6c859137e30b49b18c666ab795e6bbdfd788 | d0703a5df18818d027346e7dc713e41b503f0997 | refs/heads/master | 2021-01-22T23:26:42.230842 | 2014-11-11T00:33:32 | 2014-11-11T00:33:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,377 | py | CLASS_NAME = 'CircleControlStretchyIK'
TITLE = 'Circle-controlled Stretchy IK'
DESCRIPTION = 'This module provies stretchy IK on the joint chain,with the ability to lock the stretchiness with a 0->1 slider value. IK twist is controlled by an adjustable circle rotation control.'
import System.utils as utils
reload(utils)
import maya.cmds as cmds
import System.controlObject as controlObject
reload(controlObject)
import System.controlModule as controlModule
#reload(controlModule)
class CircleControlStretchyIK(controlModule.ControlModule):
def __init__(self,moduleNamespace):
controlModule.ControlModule.__init__(self,moduleNamespace)
def compatibleBlueprintModules(self):
return('HingeJoint',)
def install_custom(self,joints,moduleGrp,moduleContainer, createHandleControl=True, poleVectorAtRoot=True):
rootJoint = joints[1]
hingeJoint = joints[2]
endJoint = joints[3]
containedNodes = []
twistRotationAimer = cmds.group(empty=True,n=rootJoint+'_twistRotationAimer')
containedNodes.append(twistRotationAimer)
containedNodes.append(cmds.pointConstraint(rootJoint,twistRotationAimer,maintainOffset=False, n=twistRotationAimer+'_pointConstraint')[0])
cmds.pointConstraint(endJoint,twistRotationAimer,maintainOffset=False)
upVectorTarget = cmds.group(empty=True, n=rootJoint+'_twistRotationAimer_upVectorTarget')
containedNodes.append(upVectorTarget)
cmds.parent(upVectorTarget, hingeJoint,relative=True)
cmds.setAttr(upVectorTarget+'.translateZ',cmds.getAttr(hingeJoint+'.translateX'))
containedNodes.append(cmds.aimConstraint(endJoint, twistRotationAimer, maintainOffset=False, n=twistRotationAimer+'_aimConstraint', aimVector=[0.0,0.0,1.0],upVector=[1.0,0.0,0.0],worldUpType='object',worldUpObject=upVectorTarget)[0])
tempLocator = cmds.spaceLocator()[0]
cmds.parent(tempLocator, twistRotationAimer,relative=True)
cmds.setAttr(tempLocator+'.translateY',10)
twistRotationAimerPos = cmds.xform(twistRotationAimer,q=True, worldSpace=True, translation=True)
tempLocatorPos = cmds.xform(tempLocator,q=True, worldSpace=True, translation=True)
offsetVector = [tempLocatorPos[0] - twistRotationAimerPos[0],tempLocatorPos[1] - twistRotationAimerPos[1],tempLocatorPos[2] - twistRotationAimerPos[2]]
cmds.delete(tempLocator)
ikNodes = utils.basic_stretchy_IK(rootJoint,endJoint,container=moduleContainer,scaleCorrectionAttribute=self.blueprintNamespace+':module_grp.hierarchicalScale')
ikHandle = ikNodes['ikHandle']
rootPosLocator = ikNodes['rootLocator']
endPosLocator = ikNodes['endLocator']
poleVectorLocator = ikNodes['poleVectorObject']
stretchinessAttribute = ikNodes['stretchinessAttribute']
for node in [ikHandle,rootPosLocator,endPosLocator,poleVectorLocator]:
cmds.parent(node,moduleGrp,absolute=True)
if poleVectorAtRoot:
poleVectorPos = cmds.xform(rootJoint,q=True,worldSpace=True,translation=True)
else:
poleVectorPos = cmds.xform(endJoint,q=True,worldSpace=True,translation=True)
poleVectorPos[0] += offsetVector[0]
poleVectorPos[1] += offsetVector[1]
poleVectorPos[2] += offsetVector[2]
cmds.xform(poleVectorLocator,worldSpace=True,absolute=True,translation=poleVectorPos)
if createHandleControl:
name = 'ikHandleControl'
controlObjectInstance = controlObject.ControlObject()
handleControlInfo = controlObjectInstance.create(name,'cubeLocator.ma',self,lod=1,translation=True,rotation=False,globalScale=False,spaceSwitching=True)
handleControl = handleControlInfo[0]
handleRootParent = handleControlInfo[1]
cmds.parent(handleRootParent,moduleGrp,relative=True)
cmds.xform(handleControl,worldSpace=True,absolute=True,translation=cmds.xform(endPosLocator,q=True,worldSpace=True,translation=True))
pointConstraint = cmds.pointConstraint(handleControl,endPosLocator, maintainOffset=False,n=endPosLocator+'_pointConstraint')[0]
containedNodes.append(pointConstraint)
cmds.select(handleControl)
cmds.addAttr(at='float',minValue=0.0, maxValue=1.0, defaultValue=1.0,keyable=True,longName='stretchiness')
cmds.connectAttr(handleControl+'.stretchiness',stretchinessAttribute)
self.publishNameToModuleContainer(handleControl+'.stretchiness', 'stretchiness',publishToOuterContainers=True)
rotationCancellation = cmds.group(empty=True,n=self.blueprintNamespace+':'+self.moduleNamespace+':twistRotationCancellation')
containedNodes.append(rotationCancellation)
cmds.parent(rotationCancellation, twistRotationAimer,relative=True)
twistControlOffset = cmds.group(empty=True,n=self.blueprintNamespace + ':' + self.moduleNamespace + ':twistControlOffset')
containedNodes.append(twistControlOffset)
cmds.parent(twistControlOffset, rotationCancellation, relative=True)
twistControlObjectInstance = controlObject.ControlObject()
twistControlInfo = twistControlObjectInstance.create('twistControl','zAxisCircle.ma',self, lod=2,translation=False,rotation=[False,False,True],globalScale=False, spaceSwitching=False)
twistControl = twistControlInfo[0]
cmds.parent(twistControl,twistControlOffset,relative=True)
cmds.connectAttr(twistControl+'.rotateZ',ikHandle+'.twist')
pivotMultNode = cmds.shadingNode('multiplyDivide',asUtility=True,n=twistControl+'_invertOffset')
containedNodes.append(pivotMultNode)
cmds.connectAttr(twistControlOffset+'.translateX',pivotMultNode+'.input1X')
cmds.setAttr(pivotMultNode+'.input2X',-1)
cmds.connectAttr(pivotMultNode+'.output', twistControl+'.rotatePivot')
multNode = cmds.shadingNode('multiplyDivide', asUtility=True, n=rotationCancellation+'_invertRotateZ')
containedNodes.append(multNode)
cmds.connectAttr(twistControl+'.rotateZ',multNode+'.input1X')
cmds.setAttr(multNode+'.input2X',-1)
cmds.connectAttr(multNode+'.outputX',rotationCancellation+'.rotateZ')
cmds.parent(twistRotationAimer,moduleGrp, absolute=True)
ikJoints = [joints[1],joints[2],joints[3]]
jointName = utils.stripAllNamespaces(joints[1])[1]
creationPoseRoot = self.blueprintNamespace+':creationPose_'+jointName
creationPoseJoints = utils.findJointChain(creationPoseRoot)
targetJoints = [creationPoseJoints[0],creationPoseJoints[1],creationPoseJoints[2]]
utils.matchTwistAngle(twistControl+'.rotateZ',ikJoints,targetJoints)
offsetNode = cmds.shadingNode('plusMinusAverage',asUtility=True,n=twistControl+'_twistOffset')
containedNodes.append(offsetNode)
cmds.setAttr(offsetNode+'.input1D[0]',cmds.getAttr(twistControl+'.rotateZ'))
cmds.connectAttr(twistControl+'.rotateZ',offsetNode+'.input1D[1]')
cmds.connectAttr(offsetNode+'.output1D',ikHandle+'.twist',force=True)
utils.forceSceneUpdate()
cmds.setAttr(twistControl+'.rotateZ',0)
utils.addNodeToContainer(moduleContainer,containedNodes)
self.publishNameToModuleContainer(twistControlOffset+'.translateX','twistControlOffset',publishToOuterContainers=True)
cmds.setAttr(moduleGrp+'.lod',2)
return (ikNodes)
def UI(self,parentLayout):
ikHandleControl = self.blueprintNamespace + ':' + self.moduleNamespace + ':ikHandleControl'
if cmds.objExists(ikHandleControl):
controlObjectInstance = controlObject.ControlObject(ikHandleControl)
controlObjectInstance.UI(parentLayout)
cmds.attrControlGrp(attribute=ikHandleControl+'.stretchiness', label='Stretchiness')
twistControl = self.blueprintNamespace + ':' + self.moduleNamespace + ':twistControl'
controlObjectInstance = controlObject.ControlObject(twistControl)
controlObjectInstance.UI(parentLayout)
def UI_preferences(self,parentLayout):
twistOffset = self.blueprintNamespace + ':' + self.moduleNamespace + ':twistControlOffset'
cmds.attrControlGrp(attribute=twistOffset+'.translateX',label='Twist Offset')
def match(self,*args):
characterContainer = self.characterNamespaceOnly + ':character_container'
blueprintContainer = self.blueprintNamespace + ':module_container'
moduleContainer = self.blueprintNamespace + ':' + self.moduleNamespace + ':module_container'
containers = [characterContainer,blueprintContainer,moduleContainer]
for c in containers:
cmds.lockNode(c,lock=False, lockUnpublished=False)
ikJointsAll = utils.findJointChain(self.blueprintNamespace + ':' + self.moduleNamespace + ':joints_grp')
blueprintJointsAll = utils.findJointChain(self.blueprintNamespace + ':blueprint_joints_grp')
ikJoints = [ikJointsAll[1],ikJointsAll[2],ikJointsAll[3]]
blueprintJoints = [blueprintJointsAll[1],blueprintJointsAll[2],blueprintJointsAll[3]]
ikHandleControl = self.blueprintNamespace + ':' + self.moduleNamespace + ':ikHandleControl'
if cmds.objExists(ikHandleControl):
cmds.setAttr(ikHandleControl + '.stretchiness',1)
endPos = cmds.xform(blueprintJoints [2],q=True,worldSpace=True,translation=True)
cmds.xform(ikHandleControl,worldSpace=True,absolute=True, translation=endPos)
twistControl = self.blueprintNamespace + ':' + self.moduleNamespace + ':twistControl'
utils.matchTwistAngle(twistControl+'.rotateZ',ikJoints,blueprintJoints)
for c in containers:
cmds.lockNode(c,lock=True, lockUnpublished=True)
| [
"pouya_zadraf@yahoo.com"
] | pouya_zadraf@yahoo.com |
e5df317cfa1253b25b744ccfe00c7f732df1c379 | 87543720eb7d8d83a5bfdfbeed5981ae78193bb8 | /gitweb/bin/pygmentize | bd1a9886ae8a52bdb1880bac84109c727432d971 | [] | no_license | zlynn247/Website | ad365d4d8bfa70bfa145ed39d8187574e2a821a5 | 41a448cdbc9b52477cf12fcfe21be241b3b6d8b4 | refs/heads/master | 2020-04-02T12:36:06.882758 | 2018-10-24T05:07:57 | 2018-10-24T05:07:57 | 154,441,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | #!/home/zach/Documents/Projects/code/PythonWebsite/gitweb/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pygments.cmdline import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"zacharyslynn@gmail.com"
] | zacharyslynn@gmail.com | |
269b37764f29cf5798a560e87a0a3951a3c2eb2b | 0a2f8c037d1128a5776c97754f36afed53044041 | /article/models.py | ab3704606c0c0eda7e0aa9a3e0f1070f6da9290f | [] | no_license | dhgillani/DjangoSampleAdminPanel | a5a70bdf26305ebffbeddcfb1e415157db82a1b8 | c4cf5ab075a0a46c8f5a1673f277a44668d349ea | refs/heads/master | 2020-03-11T13:32:57.319419 | 2018-04-18T08:20:33 | 2018-04-18T08:20:33 | 130,028,126 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | from django.db import models
# Create your models here.
class Article(models.Model):
article_id = models.AutoField(primary_key=True)
article_heading = models.CharField(max_length=250)
article_body = models.TextField()
| [
"dhgillani@yahoo.com"
] | dhgillani@yahoo.com |
d7783685336f1bf40bc89a0ace6650f787ac65a5 | 4cb66808e193853e234f14df348a320b9321c445 | /temp.py | 753025731225fa1f9abd25363c2e82cf9655dba2 | [] | no_license | Nilanshrajput/Inter-IIT-Techmeet-Vision-Challenge | f75dc6bbe89cf74e2902ac9bd39cde0c022e74a2 | b6cfd09429f6e3bcdb717717ef439abe020ac68f | refs/heads/master | 2020-04-01T04:41:53.074598 | 2019-06-13T18:27:31 | 2019-06-13T18:27:31 | 152,873,894 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
train = pd.read_csv('../input/sign_mnist_train.csv')
test = pd.read_csv('../input/sign_mnist_test.csv') | [
"rajputnilansh@gmail.com"
] | rajputnilansh@gmail.com |
ed46b8a0637f0df523b2f2527f0ca62fc2afa03e | 0d3cfc74f6826be269be42b3058347cd37798f81 | /forca_v1.py | 7992f54ead4e23f2a0a7df893a3a7228c9adef42 | [] | no_license | aschaves1976/dsa-labs | 486b4e14be3d09c4e332a0363d495a273d616a19 | 14def7b7f3ef32dc4c3f737f06ae6f7e6216e0ed | refs/heads/master | 2022-12-09T11:23:53.400910 | 2020-09-11T19:17:00 | 2020-09-11T19:17:00 | 291,046,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,872 | py | # -*- coding: utf-8 -*-
# Hangman Game (Jogo da Forca)
# Programação Orientada a Objetos
# Import
import random
# Board (tabuleiro)
board = ['''
>>>>>>>>>>Hangman<<<<<<<<<<
+---+
| |
|
|
|
|
=========''', '''
+---+
| |
O |
|
|
|
=========''', '''
+---+
| |
O |
| |
|
|
=========''', '''
+---+
| |
O |
/| |
|
|
=========''', '''
+---+
| |
O |
/|\ |
|
|
=========''', '''
+---+
| |
O |
/|\ |
/ |
|
=========''', '''
+---+
| |
O |
/|\ |
/ \ |
|
=========''']
# Classe
class Hangman:
# Método Construtor
def __init__(self, word):
self.s_word = word
self.s_rigth_answer = ""
self.s_rigth_letters = ""
self.s_wrong_letters = ""
self.l_build_word = []
self.l_wrong_word = []
self.l_match_word = []
self.n_brdind = 0
self.n_auxind = 0
for i in range(len(self.s_word)):
self.l_build_word.append("_ ")
self.print_game_status()
self.hide_word()
# Método para adivinhar a letra
def guess(self, letter):
match_letter = self.s_word.count(letter)
if match_letter != 0:
self.l_match_word.append(letter)
self.n_auxind += 1
for i, v in enumerate(self.s_word):
if letter == v:
self.l_build_word[i] = v
self.hide_word()
else:
self.l_wrong_word.append(letter)
self.n_brdind += 1
self.n_auxind = self.n_brdind
self.print_game_status()
self.hide_word()
# Método para verificar se o jogo terminou
def hangman_over(self):
if self.hangman_won():
return True
elif self.n_brdind >= (len(board) - 1):
return True
else:
return False
# Método para verificar se o jogador venceu
def hangman_won(self):
if self.s_rigth_answer == self.s_word:
return True
else:
return False
# Método para não mostrar a letra no board
def hide_word(self):
self.s_rigth_answer = "".join(self.l_build_word)
if len(self.l_match_word) > 0:
self.s_rigth_letters = ", ".join(self.l_match_word)
if len(self.l_wrong_word) > 0:
self.s_wrong_letters = ", ".join(self.l_wrong_word)
print("\nPalavra: {}".format(str(self.s_rigth_answer)))
print("\nLetras erradas: {}".format(self.s_wrong_letters))
print("\nLetras corretas: {}".format(self.s_rigth_letters))
# Método para checar o status do game e imprimir o board na tela
def print_game_status(self):
if self.n_brdind == self.n_auxind:
print(board[self.n_brdind])
# Função para ler uma palavra de forma aleatória do banco de palavras
def rand_word():
with open("palavras.txt", "rt") as f:
bank = f.readlines()
# strip() é utilizado para retirar espaços a esquerda e a direita da palavra
return bank[random.randint(0, len(bank))].strip()
# Função Main - Execução do Programa
def main():
# Objeto
game = Hangman(rand_word())
# Enquanto o jogo não tiver terminado, print do status, solicita uma letra e faz a leitura do caracter
while not game.hangman_over():
letter = input("\nDigite uma letra: ")
game.guess(letter)
# Verifica o status do jogo
# game.print_game_status()
# De acordo com o status, imprime mensagem na tela para o usuário
if game.hangman_won():
print('\nParabéns! Você venceu!!')
else:
print('\nGame over! Você perdeu.')
print('A palavra era ' + game.s_word)
print('\nFoi bom jogar com você! Agora vá estudar!\n')
# Executa o programa
if __name__ == "__main__":
main()
| [
"alessandro.chaves1976@gmail.com"
] | alessandro.chaves1976@gmail.com |
f6e08f44f8601089857b58d9636e960c445525b9 | 1d59aee62718676ae18054f3d3b0f8cdb647dfd3 | /zui/bin/auth.py | a3f1094922c3d06c6182bbbf51a6da6ffcbe3a78 | [] | no_license | actank/zmon | 70f3d3c0de09bde85ffabb6bcf4571049186f663 | b59811f7cd28291cfbadf7db326c4e4f003ee15d | refs/heads/master | 2021-01-22T16:44:22.070839 | 2014-12-03T11:41:15 | 2014-12-03T11:41:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,037 | py | #!/usr/bin/env python
#-*- coding:UTF-8 -*-
import json
import web
import time
import os,sys
import logging
import urllib
import zmonlib
#SSO_URL = 'http://uuap.baidu.com'
SSO_URL = 'http://itebeta.baidu.com:8100'
render = web.template.render('templates')
class auth(object):
#登陆后的回调函数
def GET(self):
user_data = web.input(u=None, ticket=None)
if not user_data.u or not user_data.ticket:
return render.forbidden('')
service_url = '%s/zmon/auth?u=%s' % (web.ctx.homedomain, web.urlquote(user_data.u))
validate_url = '%s/validate?service=%s&ticket=%s' % (SSO_URL, web.urlquote(service_url), web.urlquote(user_data.ticket))
r = urllib.urlopen(validate_url).readlines()
if len(r) == 2 and r[0].strip() == 'yes':
web.config.session.sso_username = r[1].strip()
raise web.seeother(user_data.u)
else:
return render.forbidden('')
class logout(object):
def GET(self):
web.config.session.kill()
raise web.seeother('%s/logout' % SSO_URL)
#访问用户统一认证平台的单点登录系统
def uuap_sso():
sso_username = web.config.session.get('sso_username')
#如果有已经登陆则返回用户名,否则跳转到uuap登录页
if sso_username:
logging.debug('sso_username: %s' % sso_username)
return sso_username
service_url = '%s/zmon/auth?u=%s' % (web.ctx.homedomain, web.urlquote(web.ctx.homepath + web.ctx.fullpath))
raise web.seeother('%s/login?service=%s' % (SSO_URL, web.urlquote(service_url)))
#跟进用户名取到有权限的产品线
def getProcByName(userName):
ret = set()
ret.add('zTest')
sql = "select PRODUCT from user where USERNAME = '%s'" % userName
result = zmonlib.ExecSql(sql)
try:
record = result[0][0]
for product in eval(record):
ret.add(product)
except Exception,e:
pass
logging.debug("get PRODUCT from user by %s,return %s" % (userName, ret))
return ret
| [
"zhaixinrui@163.com"
] | zhaixinrui@163.com |
3c79582a91883b74e451e0e2255658243d00c489 | c1c0f44de906af90da1aeafa35c3780320b292ac | /setup.py | 1c0a1658385a12ff952286a5f50ee6acb966aa69 | [] | no_license | Gargeebhase/Discogs-Python-Wrapper | 696ca79c39d29323bf61635ecba1b7af8873ec67 | 67581337378346b308871310f862d93450e673b6 | refs/heads/master | 2023-05-25T05:13:26.132079 | 2020-06-09T04:01:26 | 2020-06-09T04:01:26 | 270,838,242 | 0 | 0 | null | 2023-05-22T20:44:19 | 2020-06-08T21:51:17 | Python | UTF-8 | Python | false | false | 596 | py | import pathlib
from setuptools import setup
# The directory containing this file
HERE = pathlib.Path(__file__).parent
# The text of the README file
README = (HERE / "README.md").read_text()
# This call to setup() does all the work
setup(
name="discogs-wrapper-python",
version="1.0.1",
packages=["discogs_wrapper"],
description="This is a python API wrapper for the discogs API.",
long_description=README,
long_description_content_type="text/markdown",
include_package_data=True,
author="Gargee Bhase",
author_email="gbhase2@gmail.com",
license="MIT",
) | [
"bhase@usc.edu"
] | bhase@usc.edu |
14b7d15f64f419181184d6af5c739890b8d7acaf | 12a72da6848ae461b995ec2fc6c4e1827be82803 | /common/monitor_bak.py | 69ab77e717e2eebdea993ff2d36e9666824bb3cb | [] | no_license | lim1942/coin_helper | f3ed40c07a049a00f052dfa3e59cee7eefe969cf | d34ce363371fd964d8c46d5dd04ca7c5eb7d35b4 | refs/heads/main | 2023-04-30T10:46:03.231440 | 2021-05-25T12:15:49 | 2021-05-25T12:15:49 | 366,247,314 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,442 | py | import json
import redis
import requests
import traceback
from threading import Thread,Lock
from datetime import datetime,timedelta
from coin_helper.settings import REDIS_URL
class RedisTool(object):
def __init__(self):
self.R = redis.Redis.from_url(REDIS_URL,decode_responses=True)
self.P = self.R.pipeline(transaction=False)
def set(self,k,v,ex):
self.R.set(k,v,ex=ex)
def get(self,k):
return self.R.get(k)
class Monitor:
redis_obj = RedisTool()
def __init__(self,**kwargs):
self.kwargs = kwargs
self.notify_lock = Lock()
self.last_notify_time = {}
self.long_before_time = datetime.now() - timedelta(days=1)
def record(self,k,v,ex=10):
try:
return self.redis_obj.set(k,v,ex)
except:
traceback.print_exc()
def compare(self,k,v,k2):
pass
def notify(self,k,message):
Thread(target=self._notify,args=(k,message)).start()
def _notify(self,k,message):
notify_time = datetime.now()
with self.notify_lock:
if notify_time - timedelta(hours=6) >= self.last_notify_time.get(k,self.long_before_time):
webhook='https://oapi.dingtalk.com/robot/send?access_token=494a793fe8aa1146b93baeef9aba96cbfa725e2ce6230c0eaa37bb682e06eea8'
header = {
"Content-Type": "application/json",
"Charset": "UTF-8"}
data ={
"msgtype": "text",
"text": {
"content": f"触发价差信号 {message}"
},
"at": {
"atMobiles":[
"13750872274"
],
"isAtAll": False
}}
ret = requests.post(url=webhook,data=json.dumps(data),headers=header).text
self.last_notify_time[k] = notify_time
return ret
class OkexMonitor(Monitor):
def __init__(self,**kwargs):
super(OkexMonitor, self).__init__(**kwargs)
self.variance_threshold = 0.05
def compare(self,k,v,k2):
try:
v = float(v)
v2 = float(self.redis_obj.get(k2))
variance = abs(v - v2)
variance_rate = variance/v
if variance_rate > self.variance_threshold:
message = f"【{k}:{v}】与【{k2}:{v2}】差异率大于{self.variance_threshold}, 差值{round(variance,6)} 差率{round(variance_rate,6)}"
self.notify(k,message)
except:
pass
# print(k,k2)
def okex_record(self,item):
self.record(item['instrument_id'],item['price'])
def okex_compare_1(self,item):
"""okex永续币本位,永续USDT 币币 三个市场两两对比"""
try:
instrument_id = item['instrument_id']
if instrument_id.endswith('USDT-SWAP'):
self.compare(instrument_id,item['price'],item['instrument_id'].split('-')[0]+'-USDT')
self.compare(instrument_id,item['price'],item['instrument_id'].split('-')[0]+'-USD-SWAP')
# 币本位永续和币币比较
elif instrument_id.endswith('USD-SWAP'):
self.compare(instrument_id,item['price'],item['instrument_id'].split('-')[0]+'-USDT')
except:
traceback.print_exc()
| [
"lim1942@163.com"
] | lim1942@163.com |
1696222304764094db9ce14ca708e4b6dae565b5 | 9287ddca2559df9df107f1ffab86d7132e5ebb93 | /lin.py | dfb06e8f8716719295f5296acbceb5333c6ba3fd | [] | no_license | QQYES/Keras-Test | 5dc9b20d763ab2a6be11a7d65bfacfae28b3add4 | 5f3b2285b560bb2e0facead73c1902112cd285bc | refs/heads/master | 2021-01-16T17:52:57.701061 | 2018-05-14T09:38:13 | 2018-05-14T09:38:13 | 100,020,005 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | file = open("onduty.csv", encoding='utf-8')
while 1:
line = file.readline()
if not line:
break
fields = line.split(",")
if (len(fields) < 4):
pass
duty_name = fields[0]
duty_type = fields[1]
duty_date = fields[2]
print('line:{}'.format(line))
print(duty_name, duty_type, duty_date)
print('duty_name:{},duty_type:{},duty_date:{}'.format(duty_name, duty_type, duty_date))
| [
"qqyesqq1@126.com"
] | qqyesqq1@126.com |
00bbe7b9bc5e114e278635b8cfb2f799516df498 | d538b6bae84256e241ebdc976bbe1a0d0b4ec7ee | /Warehouse_management/settings.py | 9f59ad4d3f82c8e678357382ec73d940ee0a6b83 | [] | no_license | Sangeethsajan/WarehouseManagement | 871b3661c74922f7b524232867e5dc5d7b3dee59 | 30731a7538fa6278e9744c3060aa93f7e37fed79 | refs/heads/master | 2023-02-10T03:59:19.142042 | 2021-01-08T15:23:03 | 2021-01-08T15:23:03 | 327,938,681 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,228 | py | """
Django settings for Warehouse_management project.
Generated by 'django-admin startproject' using Django 1.11.29.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h1x!iw)3+3pm9#(u(1i&gnzz$5pf(cqtdxh4)=oc(i6mpvel1x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'productDetails',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Warehouse_management.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Warehouse_management.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'Warehouse',
'USER': 'postgres',
'PASSWORD': 'sangeeth',
'HOST': 'localhost'
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"31395590+Sangeethsajan@users.noreply.github.com"
] | 31395590+Sangeethsajan@users.noreply.github.com |
6d04ee8916683dc219aa0d4d5e414bca50305d47 | 625364caf0b32c6f1340f481400f0f0a40072477 | /downloads/convertMyData.py | fd97cd95eafe3a9b6b10f7cb4df7300799c32679 | [
"Apache-2.0"
] | permissive | VolumeRC/volumerc.org | bc4d66896d421f08b7b2fbb1cb3a26977748dbe8 | b11f775caa2fe016b0baf1ae9ed1183ceb740f45 | refs/heads/master | 2020-04-12T02:23:47.568603 | 2017-06-07T14:57:33 | 2017-06-07T14:57:33 | 20,882,795 | 6 | 2 | null | 2016-07-12T09:42:29 | 2014-06-16T11:42:22 | JavaScript | UTF-8 | Python | false | false | 5,409 | py | #!/usr/bin/env python
print """
This code was created by Luis Kabongo, Vicomtech-IK4 Copyright 2012-2013.
This application converts the slices found in a folder into a tiled 2D texture
image in PNG format (it assumes all files in the folder are of the same type
and dimensions). It uses Python with PIL, numpy and pydicom packages are
recommended for other formats.
Information links:
http://www.volumerc.org
http://demos.vicomtech.org
Contact mailto:volumerendering@vicomtech.org
"""
import os, errno
import sys
import getopt
import math
import array
from PIL import Image #this is required to manage the images
#This is the default size when loading a Raw image
sizeOfRaw = (512, 512)
#This determines if the endianness should be reversed
rawByteSwap = True
#Write here your own load data version here ...
def loadMyData(filename):
myImageSize = (32, 32)
im = Image.new("L", myImageSize)
putpix = im.im.putpixel
for y in range(myImageSize[1]):
for x in range(myImageSize[0]):
val = 0 #... here get the value from your file (make sure that 0 <= val <= 255 )
putpix((x,y), val)
return im
#This function uses the images retrieved with loadImgFunction (whould return a PIL.Image) and
# writes them as tiles within a new square Image.
# Returns a set of Image, size of a slice, number of slices and number of slices per axis
def ImageSlices2TiledImage(filenames, loadImgFunction=loadMyData):
filenames=sorted(filenames)
print "Desired load function=", loadImgFunction.__name__
size = loadImgFunction(filenames[0]).size
numberOfSlices = len(filenames)
slicesPerAxis = int(math.ceil(math.sqrt(numberOfSlices)))
imout = Image.new("L", (size[0]*slicesPerAxis, size[1]*slicesPerAxis))
i = 0
for filename in filenames:
im = loadImgFunction(filename)
row = int( (math.floor(i/slicesPerAxis)) * size[0] )
col = int( (i%slicesPerAxis) * size[1] )
box = ( int(col), int(row), int(col+size[0]), int(row+size[1]) )
imout.paste(im, box)
i+=1
print "processed slice : "+str(i)+"/"+str(numberOfSlices) #filename
return imout, size, numberOfSlices, slicesPerAxis
#This functions takes a (tiled) image and writes it to a png file with base filename outputFilename.
# It also writes several versions in different sizes determined by dimensions
def WriteVersions(tileImage,outputFilename,dimensions=[8192,4096,2048,1024]):
try:
print 'Creating folder',os.path.dirname(outputFilename),'...',
os.makedirs(os.path.dirname(outputFilename))
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(outputFilename)):
print 'was already there.'
else:
print ', folders might not be created, trying to write anyways...'
except:
print "Could not create folders, trying to write anyways..."
print "Writing complete image: "+outputFilename+"_full.png"
try:
tileImage.save(outputFilename+"_full.png", "PNG")
except:
print "Failed writing ",outputFilename+"_full.png"
for dim in dimensions:
if tileImage.size[0] > dim :
print "Writing "+str(dim)+"x"+str(dim)+" version: "+outputFilename+"_"+str(dim)+".png"
tmpImage = tileImage.resize((dim,dim))
try:
tmpImage.save(outputFilename+"_"+str(dim)+".png", "PNG")
except:
print "Failed writing ",outputFilename,"_",str(dim),".png"
#This function lists the files within a given directory dir
def listdir_fullpath(d):
return [os.path.join(d, f) for f in os.listdir(d)]
#This is the main program, it takes at least 2 arguments <InputFolder> and <OutputFilename>
def main(argv=None):
print "Parsing arguments..."
if argv is None:
argv = sys.argv
if len(argv) < 3:
print "Usage: command <InputFolder> <OutputFilename>"
print " <InputFolder> must contain only one series of DATA files to be processed"
print " <OutputFilename> must contain the path and base name of the desired output, extensions will be added automatically"
print "Note1: this version requires you to define the MyLoadData function."
print "Note2: this version does not process several DATA folders recursively."
print "You typed:", argv
return 2
filenames = listdir_fullpath(argv[1])
#Convert into a tiled image
if len(filenames) > 0:
imgTile, sliceResolution, numberOfSlices, slicesPerAxis = ImageSlices2TiledImage(filenames,loadMyData)
else:
print "No files found in that folder, check your parameters and your load function."
return 2
#Write a text file containing the number of slices for reference
try:
try:
print 'Creating folder',os.path.dirname(argv[2]),'...',
os.makedirs(os.path.dirname(argv[2]))
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(argv[2])):
print 'was already there.'
else:
print ', folders might not be created, trying to write anyways...'
except:
print ", could not create folders, trying to write anyways..."
with open(argv[2]+"_AtlasDim.txt",'w') as f:
f.write(str((numberOfSlices,(slicesPerAxis,slicesPerAxis))))
except:
print "Could not write a text file",argv[2]+"_AtlasDim.txt","containing dimensions (total slices, slices per axis):",(numberOfSlices,(slicesPerAxis,slicesPerAxis))
else:
print "Created",argv[2]+"_AtlasDim.txt","containing dimensions (total slices, slices per axis):",(numberOfSlices,(slicesPerAxis,slicesPerAxis))
#Output is written in different sizes
WriteVersions(imgTile, argv[2])
if __name__ == "__main__":
sys.exit(main())
| [
"arbelaitz@gmail.com"
] | arbelaitz@gmail.com |
24bf35686d2947132b3bc7a59e415e2e84e986d7 | 9b0c67a5813abbbfd4670b9b1689de83dab580ae | /seed.py | d03e7782f7a78835b918a85e98fa3c41bd30a056 | [] | no_license | galloway118/project-gold | 155aa8f9203d33bf4dc5443c08fa3d5f42fa89b2 | 285aebcdb30cd2f2d6323bbab7a1606bb481eac3 | refs/heads/master | 2020-12-22T09:37:05.371519 | 2020-01-28T13:25:06 | 2020-01-28T13:25:06 | 236,740,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,502 | py | from BookModel import Book, User
from BookModel import db
Tom = Book(name="chris", price="2.99", isbn='292020302')
Rish = Book(name="Rish", price="2.99", isbn='292020302')
Jack = Book(name="Jack", price="2.99", isbn='292020302')
Rhianon = Book(name="Rhianon", price="2.99", isbn='292020302')
Amelia = Book(name="Amelia", price="2.99", isbn='292020302')
Lewis = Book(name="Lewis", price="2.99", isbn='292020302')
db.drop_all()
db.create_all()
db.session.add(Tom)
db.session.add(Rish)
db.session.add(Jack)
db.session.add(Rhianon)
db.session.add(Amelia)
db.session.add(Lewis)
db.session.commit()
# SQLAlchemy database model
# class User(Base):
# def __init__(self, id_num=None, name=None, age=None):
# self.id_num = id_num
# self.name = name
# self.age = age
# def __str__(self):
# return "ID=%d, Name=%s, Age=%d" % (self.id_num, self.name, self.age)
# All seeders inherit from Seeder
# class DemoSeeder(Seeder):
# # run() will be called by Flask-Seeder
# def run(self):
# # Create a new Faker and tell it how to create User objects
# faker = Faker(
# cls=user,
# init={
# "id": generator.Sequence(),
# "name": generator.Name(),
# "age": generator.Integer(start=20, end=100)
# }
# )
# # Create 5 users
# for user in faker.create(5):
# print("Adding user: %s" % user)
# self.db.session.add(user)
| [
"galloway118@icloud.com"
] | galloway118@icloud.com |
032766afc8f0bd00858d9f6651c02ebf4e5264df | 49367dfe75964bc0ba664c1d7080526950708949 | /tests/test_dirac_transport_contact_width.py | 0335ea1778884f0422454065503a7012c62f6520 | [
"Apache-2.0"
] | permissive | mirzaelahi/quest | 93ae84c875430fa56e1de7235b5624e2bbb1e812 | c433175802014386c2b1bf3c8932cd66b0d37c8e | refs/heads/master | 2021-01-12T13:31:48.121874 | 2016-02-21T00:58:35 | 2016-02-21T00:58:35 | 69,840,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,388 | py | #!/usr/bin/python
"""
TI Surface transport simulator for p-n junction using QMICAD.
Author: K M Masum Habib <masum.habib@virginia.edu>
Last update: 03/26/2014
"""
import sys
import numpy as np
import math
from math import pi, tan, cos, sin
import qmicad
from qmicad.linspace import linspace
from qmicad.simulators.dirackp import *
##
# Run the simulation
def simulate():
# some constants
# Transport simulator
tr = Transport()
tr.HamType = tr.HAM_GRAPHENE_KP
tr.DevType = tr.COH_RGF_NON_UNI
tr.verbosity = vprint.MSG_NORMAL
# Simulation parameters ------------------------------------------
tr.VERSION_REQUIRED = "0.10.0"
tr.verbosity = vprint.MSG_DEBUG
# Do a dry run
tr.DryRun = False
#tr.DryRun = True
# Device structure
tr.gates = []; # Gate tuple (width, theta, voltage ratio)
tr.a = 5
tr.K = 1
# Device structure
tr.nw = 21
tr.nlc = 11
tr.nrc = 11
tr.th = 0
tr.ng1 = 10
tr.ns1 = 0
tr.ng2 = 10
thr = tr.th*pi/180;
# gate # 1 right edge
tr.addGateEdge(tr.ng1, 0, 1.0, 0.0)
# split # 1 right edge
tr.addGateEdge(tr.ns1, 0)
# gate # 2 right edge
tr.addGateEdge(tr.ng2 + tr.nw/2*tan(thr), tr.th, -1.0, 0.0)
# Get the total length
tr.nb = tr.computeDevLen();
# Output path
tr.OutPath = "./pn_TE"
# Calculation type
tr.Calculations["TE"] = 1
#tr.Calculations["n"] = [{"N":2, "Block":'All'}]
tr.Calculations["n"] = [{"N":2, "Block":4}]
#tr.Calculations["I"] = [{"N":2, "Block":'All'}]
tr.Calculations["I"] = [{"N":2, "From":0, "To":1},
{"N":2, "From":tr.nb-2, "To":tr.nb-1}]
# Output path
tr.OutPath += "/"
tr.OutFileName = "TR"
# Bias
tr.VGG = np.array([0.0]) # Gate voltage offset
tr.Vo = 0.0 # Built-in voltage
tr.VDD = np.array([0.0])
tr.rVS =-0.5 # source ratio
tr.rVD = 0.5 # drain ratio
# Energy range
tr.Emin =-0.5 # Minimum energy
tr.dE = 0.005 # Energy step
tr.Emax = 0.5-tr.dE+0.00001 # Maximum energy
tr.kT = 0.0259 # temperature
# --------------------------------------------------------------
if qmicad.version != tr.VERSION_REQUIRED:
msg = "QMICAD version mismatch. Required "
msg += str(tr.VERSION_REQUIRED) + ". Got "
msg += str(qmicad.version)
raise Exception(msg)
# Get the total length
tr.nb = tr.computeDevLen()
# Change lattice constant if requested
if hasattr(tr, 'a'):
if not hasattr(tr, 'hp'):
tr.hp = TISurfKpParams()
tr.hp.a = tr.a
if hasattr(tr, 'K'):
if not hasattr(tr, 'hp'):
tr.hp = TISurfKpParams()
tr.hp.K = tr.K
# Create the atomistic geometry
tr.createAtomicGeom()
# Add roughness if requested
if hasattr(tr, 'roughness'):
tr.createRoughEdges(tr.roughness)
# Generate the Hamiltonian
tr.generateHamiltonian()
# Set up the potential solver
tr.setupPotential()
#
# Gates and contacts
#
xmn = tr.xmn
xmx = tr.xmx
ymn = tr.ymn
ymx = tr.ymx
a = tr.hp.a
# Source contact
beg = xmn
end = beg + a
tmp = Quadrilateral(Point(beg, ymn), Point(end, ymn),
Point(end, ymx), Point(beg, ymx))
if hasattr(tr, 'rVS'):
tr.addSource(tmp, tr.rVS)
else:
tr.addSource(tmp)
# Gates and splits
beg = end
begb = end
begt = end
for gate in tr.gates:
tanth = tan(gate["th"]*pi/180.0)
sd = tr.nw*a/2.0*tanth
end = beg + gate["nl"]*a
endb = end - sd
endt = end + sd
tmp = Quadrilateral(Point(begb, ymn), Point(endb, ymn),
Point(endt, ymx), Point(begt, ymx))
beg = end
begb = endb
begt = endt
rVo = gate["rVo"]
rVG = gate["rVG"]
if rVo is None and rVo is None:
tr.addLinearRegion(tmp)
else:
tr.addGate(tmp, rVo, rVG)
# Drain contact
begb = endb
begt = endt
endb = begb + a
endt = begt + a
tmp = Quadrilateral(Point(begb, ymn), Point(xmx, ymn),
Point(xmx, ymx), Point(begt, ymx))
if hasattr(tr, 'rVD'):
tr.addDrain(tmp, tr.rVD)
else:
tr.addDrain(tmp)
# Run
tr.run()
"""
The main() function.
"""
def main(argv = None):
if argv is None:
argv = sys.argv
# Run the simulator
simulate()
return 0
# Inject some helpers function to Transport
def _addGateEdge(self, nl, th, rVo=None, rVG=None):
"""Adds gate or a linear region."""
self.gates.append({"nl":int(nl), "th":th, "rVo":rVo, "rVG":rVG})
Transport.addGateEdge = _addGateEdge
def _computeDevLen(self):
nb = 3;
for gate in self.gates:
nb = nb + gate["nl"]
return int(nb)
Transport.computeDevLen = _computeDevLen
"""
Entry point.
"""
if __name__ == "__main__":
sys.exit(main())
| [
"masum.habib@gmail.com"
] | masum.habib@gmail.com |
5d3a3b7c6c08b586432ba52bd164c433343d9646 | 1603072ba2e7e0d3a6beb6314e25a4ff75b7b02e | /myRandom.py | d1460b7c0d602beecd8e0101377ad875645c4c0b | [] | no_license | me-jyotii/OneMonth.github.io | 20a78412e1add3817bb4fc84b8f9b0d2d85b802b | 0f33bc28980a5dca4d637f80931f20f40a9b2b03 | refs/heads/master | 2023-02-28T21:23:10.932276 | 2021-02-13T11:26:52 | 2021-02-13T11:26:52 | 338,558,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | import random
bars = ["Rice",
"Pulaw",
"Puri",
"Kheer",
"Biryani",
"Chicken lolipop",
"Momos"]
people = ["Fish",
"Tadka",
"Halwa",
"Puwa",
"Souce",
"Chawal",
"Red Chilli Souce"]
random_dish1 = random.choice(bars)
random_dish2 = random.choice(people)
# random_friends = random.choice(people)
print(f"Would you like to eat {random_dish1} with {random_dish2} ")
| [
"jyotisjr76@gmail.com"
] | jyotisjr76@gmail.com |
a1f9641676acef26b8880cb6b32dc2290e304628 | d47b058c8e2d7509aea5e005f76fcf5d7fbff444 | /testing/test_collection.py | 5a1e9a0521ebde6683b2b11139ad9566d5cb4852 | [
"MIT"
] | permissive | geraldoandradee/pytest | 51296c2736ca38accee875f62c57127f3cd1e3a8 | 41ab848fa6716b35aa5f8eb92972e6a9721016d8 | refs/heads/master | 2020-04-09T20:41:54.392752 | 2013-10-10T22:01:56 | 2013-10-10T22:01:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,224 | py | import pytest, py
from _pytest.main import Session
class TestCollector:
    """Behavioral tests for the basic Collector/Item node hierarchy."""
    def test_collect_versus_item(self):
        """Collector and Item are disjoint roots: neither subclasses the other."""
        from pytest import Collector, Item
        assert not issubclass(Collector, Item)
        assert not issubclass(Item, Collector)
    def test_compat_attributes(self, testdir, recwarn):
        """Legacy node-class attributes on a collector still resolve to the
        public pytest classes (backward-compatibility aliases)."""
        modcol = testdir.getmodulecol("""
            def test_pass(): pass
            def test_fail(): assert 0
        """)
        recwarn.clear()
        assert modcol.Module == pytest.Module
        assert modcol.Class == pytest.Class
        assert modcol.Item == pytest.Item
        assert modcol.File == pytest.File
        assert modcol.Function == pytest.Function
    def test_check_equality(self, testdir):
        """Node equality and hash are value-based: two independent lookups
        of the same test function compare equal; different nodes and
        arbitrary foreign objects do not."""
        modcol = testdir.getmodulecol("""
            def test_pass(): pass
            def test_fail(): assert 0
        """)
        fn1 = testdir.collect_by_name(modcol, "test_pass")
        assert isinstance(fn1, pytest.Function)
        fn2 = testdir.collect_by_name(modcol, "test_pass")
        assert isinstance(fn2, pytest.Function)
        assert fn1 == fn2
        assert fn1 != modcol
        if py.std.sys.version_info < (3, 0):
            # cmp() only exists on Python 2
            assert cmp(fn1, fn2) == 0
        assert hash(fn1) == hash(fn2)
        fn3 = testdir.collect_by_name(modcol, "test_fail")
        assert isinstance(fn3, pytest.Function)
        assert not (fn1 == fn3)
        assert fn1 != fn3
        for fn in fn1,fn2,fn3:
            # nodes never compare equal to non-node objects, in either order
            assert fn != 3
            assert fn != modcol
            assert fn != [1,2,3]
            assert [1,2,3] != fn
            assert modcol != fn
    def test_getparent(self, testdir):
        """getparent(cls) walks up the parent chain to the nearest node of
        the requested class, including the node itself."""
        modcol = testdir.getmodulecol("""
            class TestClass:
                def test_foo():
                    pass
        """)
        cls = testdir.collect_by_name(modcol, "TestClass")
        fn = testdir.collect_by_name(
            testdir.collect_by_name(cls, "()"), "test_foo")
        parent = fn.getparent(pytest.Module)
        assert parent is modcol
        parent = fn.getparent(pytest.Function)
        assert parent is fn
        parent = fn.getparent(pytest.Class)
        assert parent is cls
    def test_getcustomfile_roundtrip(self, testdir):
        """A custom File collector produced by a conftest's
        pytest_collect_file hook can be re-collected from its node id."""
        hello = testdir.makefile(".xxx", hello="world")
        testdir.makepyfile(conftest="""
            import pytest
            class CustomFile(pytest.File):
                pass
            def pytest_collect_file(path, parent):
                if path.ext == ".xxx":
                    return CustomFile(path, parent=parent)
        """)
        node = testdir.getpathnode(hello)
        assert isinstance(node, pytest.File)
        assert node.name == "hello.xxx"
        nodes = node.session.perform_collect([node.nodeid], genitems=False)
        assert len(nodes) == 1
        assert isinstance(nodes[0], pytest.File)
class TestCollectFS:
    """Tests for filesystem traversal rules during collection."""
    def test_ignored_certain_directories(self, testdir):
        """VCS/internal directories are skipped by default collection."""
        root = testdir.tmpdir
        for ignored in ("_darcs", "CVS", "{arch}", ".whatever", ".bzr"):
            root.ensure(ignored, "test_notfound.py")
        root.ensure("normal", "test_found.py")
        # give every created test file a trivially passing test body
        for testfile in root.visit("test_*.py"):
            testfile.write("def test_hello(): pass")
        output = testdir.runpytest("--collect-only").stdout.str()
        assert "test_notfound" not in output
        assert "test_found" in output
    def test_custom_norecursedirs(self, testdir):
        """The norecursedirs ini option (with glob patterns) excludes
        matching directories from recursion, but an explicitly named
        path inside an excluded directory is still collectable."""
        testdir.makeini("""
            [pytest]
            norecursedirs = mydir xyz*
        """)
        base = testdir.tmpdir
        base.ensure("mydir", "test_hello.py").write("def test_1(): pass")
        base.ensure("xyz123", "test_2.py").write("def test_2(): 0/0")
        base.ensure("xy", "test_ok.py").write("def test_3(): pass")
        # default run: only the non-excluded "xy" directory is collected
        testdir.inline_run().assertoutcome(passed=1)
        # naming the excluded file directly still runs (and fails) it
        testdir.inline_run("xyz123/test_2.py").assertoutcome(failed=1)
class TestCollectPluginHookRelay:
    """Collection hooks are relayed to plugin objects handed to pytest.main()."""
    def test_pytest_collect_file(self, testdir):
        """pytest_collect_file fires once per (non-ignored) file."""
        seen = []
        class Recorder:
            def pytest_collect_file(self, path, parent):
                seen.append(path)
        testdir.makefile(".abc", "xyz")
        pytest.main([testdir.tmpdir], plugins=[Recorder()])
        assert len(seen) == 1
        assert seen[0].ext == '.abc'
    def test_pytest_collect_directory(self, testdir):
        """pytest_collect_directory fires for each traversed directory."""
        seen = []
        class Recorder:
            def pytest_collect_directory(self, path, parent):
                seen.append(path.basename)
        testdir.mkdir("hello")
        testdir.mkdir("world")
        pytest.main(testdir.tmpdir, plugins=[Recorder()])
        assert "hello" in seen
        assert "world" in seen
class TestPrunetraceback:
    """Tests for pruning/customizing tracebacks of collection-time errors."""
    def test_collection_error(self, testdir):
        """An import failure during collection is reported concisely,
        without the internal __import__ machinery in the traceback."""
        p = testdir.makepyfile("""
            import not_exists
        """)
        result = testdir.runpytest(p)
        assert "__import__" not in result.stdout.str(), "too long traceback"
        result.stdout.fnmatch_lines([
            "*ERROR collecting*",
            "*mport*not_exists*"
        ])
    def test_custom_repr_failure(self, testdir):
        """A File subclass may override repr_failure() to fully control how
        its own collection errors are rendered; other exceptions are
        delegated back to the base implementation."""
        p = testdir.makepyfile("""
            import not_exists
        """)
        testdir.makeconftest("""
            import pytest
            def pytest_collect_file(path, parent):
                return MyFile(path, parent)
            class MyError(Exception):
                pass
            class MyFile(pytest.File):
                def collect(self):
                    raise MyError()
                def repr_failure(self, excinfo):
                    if excinfo.errisinstance(MyError):
                        return "hello world"
                    return pytest.File.repr_failure(self, excinfo)
        """)
        result = testdir.runpytest(p)
        result.stdout.fnmatch_lines([
            "*ERROR collecting*",
            "*hello world*",
        ])
    @pytest.mark.xfail(reason="other mechanism for adding to reporting needed")
    def test_collect_report_postprocessing(self, testdir):
        """A pytest_make_collect_report multicall wrapper should be able to
        post-process the report (expected to fail, see xfail reason)."""
        p = testdir.makepyfile("""
            import not_exists
        """)
        testdir.makeconftest("""
            import pytest
            def pytest_make_collect_report(__multicall__):
                rep = __multicall__.execute()
                rep.headerlines += ["header1"]
                return rep
        """)
        result = testdir.runpytest(p)
        result.stdout.fnmatch_lines([
            "*ERROR collecting*",
            "*header1*",
        ])
class TestCustomConftests:
    """Tests for conftest.py hooks that customize filesystem collection."""
    def test_ignore_collect_path(self, testdir):
        """pytest_ignore_collect can veto directories and individual files;
        vetoed paths are never imported, so their syntax errors are harmless."""
        testdir.makeconftest("""
            def pytest_ignore_collect(path, config):
                return path.basename.startswith("x") or \
                       path.basename == "test_one.py"
        """)
        sub = testdir.mkdir("xy123")
        sub.ensure("test_hello.py").write("syntax error")
        sub.join("conftest.py").write("syntax error")
        testdir.makepyfile("def test_hello(): pass")
        testdir.makepyfile(test_one="syntax error")
        result = testdir.runpytest("--fulltrace")
        assert result.ret == 0
        result.stdout.fnmatch_lines(["*1 passed*"])
    def test_ignore_collect_not_called_on_argument(self, testdir):
        """A path given explicitly on the command line bypasses
        pytest_ignore_collect; only implicit collection consults the hook."""
        testdir.makeconftest("""
            def pytest_ignore_collect(path, config):
                return True
        """)
        p = testdir.makepyfile("def test_hello(): pass")
        result = testdir.runpytest(p)
        assert result.ret == 0
        assert "1 passed" in result.stdout.str()
        result = testdir.runpytest()
        assert result.ret == 0
        assert "1 passed" not in result.stdout.str()
    def test_collectignore_exclude_on_option(self, testdir):
        """collect_ignore is consulted live: clearing it from
        pytest_configure (driven by a command line option) re-enables
        collection of the previously ignored paths."""
        testdir.makeconftest("""
            collect_ignore = ['hello', 'test_world.py']
            def pytest_addoption(parser):
                parser.addoption("--XX", action="store_true", default=False)
            def pytest_configure(config):
                if config.getvalue("XX"):
                    collect_ignore[:] = []
        """)
        testdir.mkdir("hello")
        testdir.makepyfile(test_world="def test_hello(): pass")
        result = testdir.runpytest()
        assert result.ret == 0
        assert "passed" not in result.stdout.str()
        result = testdir.runpytest("--XX")
        assert result.ret == 0
        assert "passed" in result.stdout.str()
    def test_pytest_fs_collect_hooks_are_seen(self, testdir):
        """A custom Module class returned from pytest_collect_file shows up
        in --collect-only output."""
        testdir.makeconftest("""
            import pytest
            class MyModule(pytest.Module):
                pass
            def pytest_collect_file(path, parent):
                if path.ext == ".py":
                    return MyModule(path, parent)
        """)
        # calls kept for their filesystem side effects; the previously
        # bound-but-unused return values have been dropped
        testdir.mkdir("sub")
        testdir.makepyfile("def test_x(): pass")
        result = testdir.runpytest("--collect-only")
        result.stdout.fnmatch_lines([
            "*MyModule*",
            "*test_x*"
        ])
    def test_pytest_collect_file_from_sister_dir(self, testdir):
        """Each subdirectory's own conftest applies its custom Module class
        to the files collected below it."""
        sub1 = testdir.mkpydir("sub1")
        sub2 = testdir.mkpydir("sub2")
        conf1 = testdir.makeconftest("""
            import pytest
            class MyModule1(pytest.Module):
                pass
            def pytest_collect_file(path, parent):
                if path.ext == ".py":
                    return MyModule1(path, parent)
        """)
        conf1.move(sub1.join(conf1.basename))
        conf2 = testdir.makeconftest("""
            import pytest
            class MyModule2(pytest.Module):
                pass
            def pytest_collect_file(path, parent):
                if path.ext == ".py":
                    return MyModule2(path, parent)
        """)
        conf2.move(sub2.join(conf2.basename))
        p = testdir.makepyfile("def test_x(): pass")
        p.copy(sub1.join(p.basename))
        p.copy(sub2.join(p.basename))
        result = testdir.runpytest("--collect-only")
        result.stdout.fnmatch_lines([
            "*MyModule1*",
            "*MyModule2*",
            "*test_x*"
        ])
class TestSession:
    """Tests for the Session root collector and the collection protocol:
    argument parsing, node ids, hook call ordering, and id-based
    re-collection.

    NOTE(review): several locals that look unused (e.g. ``topdir``, ``p``)
    are apparently referenced from inside the hookrecorder.contains()
    expectation strings, which seem to be evaluated with access to this
    frame's namespace -- do not remove them without confirming.
    """
    def test_parsearg(self, testdir):
        """_parsearg splits a "path::name" argument into the target path
        plus remaining name parts, resolved relative to the session root."""
        p = testdir.makepyfile("def test_func(): pass")
        subdir = testdir.mkdir("sub")
        subdir.ensure("__init__.py")
        target = subdir.join(p.basename)
        p.move(target)
        testdir.chdir()
        subdir.chdir()
        config = testdir.parseconfig(p.basename)
        rcol = Session(config=config)
        assert rcol.fspath == subdir
        parts = rcol._parsearg(p.basename)
        assert parts[0] == target
        assert len(parts) == 1
        parts = rcol._parsearg(p.basename + "::test_func")
        assert parts[0] == target
        assert parts[1] == "test_func"
        assert len(parts) == 2
    def test_collect_topdir(self, testdir):
        """Collecting the session's own node id yields one collector for
        the argument file."""
        p = testdir.makepyfile("def test_func(): pass")
        id = "::".join([p.basename, "test_func"])
        # XXX migrate to inline_genitems? (see below)
        config = testdir.parseconfig(id)
        topdir = testdir.tmpdir
        rcol = Session(config)
        assert topdir == rcol.fspath
        rootid = rcol.nodeid
        #root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]
        #assert root2 == rcol, rootid
        colitems = rcol.perform_collect([rcol.nodeid], genitems=False)
        assert len(colitems) == 1
        assert colitems[0].fspath == p
    def test_collect_protocol_single_function(self, testdir):
        """Collecting "file::func" yields that one item, round-trips its
        node id, and fires the collection hooks in the expected order."""
        p = testdir.makepyfile("def test_func(): pass")
        id = "::".join([p.basename, "test_func"])
        topdir = testdir.tmpdir
        items, hookrec = testdir.inline_genitems(id)
        item, = items
        assert item.name == "test_func"
        newid = item.nodeid
        assert newid == id
        py.std.pprint.pprint(hookrec.hookrecorder.calls)
        hookrec.hookrecorder.contains([
            ("pytest_collectstart", "collector.fspath == topdir"),
            ("pytest_make_collect_report", "collector.fspath == topdir"),
            ("pytest_collectstart", "collector.fspath == p"),
            ("pytest_make_collect_report", "collector.fspath == p"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport", "report.nodeid.startswith(p.basename)"),
            ("pytest_collectreport", "report.nodeid == '.'")
        ])
    def test_collect_protocol_method(self, testdir):
        """All id spellings that address a test method (file, class,
        instance, full id) resolve to the same normalized node id."""
        p = testdir.makepyfile("""
            class TestClass:
                def test_method(self):
                    pass
        """)
        normid = p.basename + "::TestClass::()::test_method"
        for id in [p.basename,
                   p.basename + "::TestClass",
                   p.basename + "::TestClass::()",
                   normid,
                   ]:
            items, hookrec = testdir.inline_genitems(id)
            assert len(items) == 1
            assert items[0].name == "test_method"
            newid = items[0].nodeid
            assert newid == normid
    def test_collect_custom_nodes_multi_id(self, testdir):
        """When a conftest contributes a custom File for the same path that
        the Python plugin also collects, one file argument produces items
        from both collectors and both appear in the hook trace."""
        p = testdir.makepyfile("def test_func(): pass")
        testdir.makeconftest("""
            import pytest
            class SpecialItem(pytest.Item):
                def runtest(self):
                    return # ok
            class SpecialFile(pytest.File):
                def collect(self):
                    return [SpecialItem(name="check", parent=self)]
            def pytest_collect_file(path, parent):
                if path.basename == %r:
                    return SpecialFile(fspath=path, parent=parent)
        """ % p.basename)
        id = p.basename
        items, hookrec = testdir.inline_genitems(id)
        py.std.pprint.pprint(hookrec.hookrecorder.calls)
        assert len(items) == 2
        hookrec.hookrecorder.contains([
            ("pytest_collectstart",
                "collector.fspath == collector.session.fspath"),
            ("pytest_collectstart",
                "collector.__class__.__name__ == 'SpecialFile'"),
            ("pytest_collectstart",
                "collector.__class__.__name__ == 'Module'"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport", "report.nodeid.startswith(p.basename)"),
            #("pytest_collectreport",
            # "report.fspath == %r" % str(rcol.fspath)),
        ])
    def test_collect_subdir_event_ordering(self, testdir):
        """Collecting a test file inside a package subdirectory reports a
        node id relative to the rootdir ('aaa/test_aaa.py...')."""
        p = testdir.makepyfile("def test_func(): pass")
        aaa = testdir.mkpydir("aaa")
        test_aaa = aaa.join("test_aaa.py")
        p.move(test_aaa)
        items, hookrec = testdir.inline_genitems()
        assert len(items) == 1
        py.std.pprint.pprint(hookrec.hookrecorder.calls)
        hookrec.hookrecorder.contains([
            ("pytest_collectstart", "collector.fspath == test_aaa"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport",
                "report.nodeid.startswith('aaa/test_aaa.py')"),
        ])
    def test_collect_two_commandline_args(self, testdir):
        """Collecting '.' with two sibling package dirs yields both items
        and fires the per-directory hook sequence for each in order."""
        p = testdir.makepyfile("def test_func(): pass")
        aaa = testdir.mkpydir("aaa")
        bbb = testdir.mkpydir("bbb")
        test_aaa = aaa.join("test_aaa.py")
        p.copy(test_aaa)
        test_bbb = bbb.join("test_bbb.py")
        p.move(test_bbb)
        id = "."
        items, hookrec = testdir.inline_genitems(id)
        assert len(items) == 2
        py.std.pprint.pprint(hookrec.hookrecorder.calls)
        hookrec.hookrecorder.contains([
            ("pytest_collectstart", "collector.fspath == test_aaa"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport", "report.nodeid == 'aaa/test_aaa.py'"),
            ("pytest_collectstart", "collector.fspath == test_bbb"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport", "report.nodeid == 'bbb/test_bbb.py'"),
        ])
    def test_serialization_byid(self, testdir):
        """An item's nodeid can be fed back into collection and resolves to
        an equivalent item (same name and path)."""
        p = testdir.makepyfile("def test_func(): pass")
        items, hookrec = testdir.inline_genitems()
        assert len(items) == 1
        item, = items
        items2, hookrec = testdir.inline_genitems(item.nodeid)
        item2, = items2
        assert item2.name == item.name
        assert item2.fspath == item.fspath
    def test_find_byid_without_instance_parents(self, testdir):
        """An id that omits the '()' instance part still resolves, and the
        resulting item carries the normalized id including '()'."""
        p = testdir.makepyfile("""
            class TestClass:
                def test_method(self):
                    pass
        """)
        arg = p.basename + ("::TestClass::test_method")
        items, hookrec = testdir.inline_genitems(arg)
        assert len(items) == 1
        item, = items
        assert item.nodeid.endswith("TestClass::()::test_method")
class Test_getinitialnodes:
    """Resolution of initial collection nodes from file-system paths."""

    def test_global_file(self, testdir, tmpdir):
        """A bare .py file outside a package maps to a Module under rootdir."""
        path = tmpdir.ensure("x.py")
        config = testdir.parseconfigure(path)
        col = testdir.getnode(config, path)
        assert isinstance(col, pytest.Module)
        assert col.name == 'x.py'
        assert col.parent.name == testdir.tmpdir.basename
        assert col.parent.parent is None
        # every node in the chain shares the same config object
        for col in col.listchain():
            assert col.config is config

    def test_pkgfile(self, testdir):
        """A module inside a package keeps its package-relative name."""
        testdir.chdir()
        subdir = testdir.tmpdir.join("subdir")
        path = subdir.ensure("x.py")
        subdir.ensure("__init__.py")
        config = testdir.parseconfigure(path)
        col = testdir.getnode(config, path)
        assert isinstance(col, pytest.Module)
        assert col.name == 'subdir/x.py'
        assert col.parent.parent is None
        for col in col.listchain():
            assert col.config is config
class Test_genitems:
    """Behavior of item generation over collected files."""

    def test_check_collect_hashes(self, testdir):
        """Distinct collected items are pairwise unequal with distinct hashes."""
        pyfile = testdir.makepyfile("""
            def test_1():
                pass
            def test_2():
                pass
        """)
        # duplicate the module so the same test names exist in two files
        pyfile.copy(pyfile.dirpath(pyfile.purebasename + "2" + ".py"))
        items, reprec = testdir.inline_genitems(pyfile.dirpath())
        assert len(items) == 4
        for left_idx, left in enumerate(items):
            for right_idx, right in enumerate(items):
                if right_idx != left_idx:
                    assert hash(left) != hash(right)
                    assert left != right

    def test_example_items1(self, testdir):
        """Names and modpaths of functions, methods and inherited methods."""
        pyfile = testdir.makepyfile('''
            def testone():
                pass
            class TestX:
                def testmethod_one(self):
                    pass
            class TestY(TestX):
                pass
        ''')
        items, reprec = testdir.inline_genitems(pyfile)
        assert len(items) == 3
        expected_names = ('testone', 'testmethod_one', 'testmethod_one')
        for item, expected in zip(items, expected_names):
            assert item.name == expected
        # getmodpath reflects the defining (or inheriting) class
        assert items[0].getmodpath() == "testone"
        assert items[1].getmodpath() == "TestX.testmethod_one"
        assert items[2].getmodpath() == "TestY.testmethod_one"
        full_path = items[0].getmodpath(stopatmodule=False)
        assert full_path.endswith("test_example_items1.testone")
        print(full_path)
def test_matchnodes_two_collections_same_file(testdir):
    """Two plugins each collecting the same .abc file yield two runnable items,
    and the custom item remains addressable by its node id."""
    testdir.makeconftest("""
        import pytest
        def pytest_configure(config):
            config.pluginmanager.register(Plugin2())
        class Plugin2:
            def pytest_collect_file(self, path, parent):
                if path.ext == ".abc":
                    return MyFile2(path, parent)
        def pytest_collect_file(path, parent):
            if path.ext == ".abc":
                return MyFile1(path, parent)
        class MyFile1(pytest.Item, pytest.File):
            def runtest(self):
                pass
        class MyFile2(pytest.File):
            def collect(self):
                return [Item2("hello", parent=self)]
        class Item2(pytest.Item):
            def runtest(self):
                pass
    """)
    abcfile = testdir.makefile(".abc", "")
    full_run = testdir.runpytest()
    assert full_run.ret == 0
    full_run.stdout.fnmatch_lines([
        "*2 passed*",
    ])
    # the Item2 collected by Plugin2 is directly selectable by node id
    single_run = testdir.runpytest("%s::hello" % abcfile.basename)
    single_run.stdout.fnmatch_lines([
        "*1 passed*",
    ])
class TestNodekeywords:
    """Keyword exposure on collected nodes."""

    def test_no_under(self, testdir):
        """Keywords include the node name and no private (_-prefixed) entries."""
        modcol = testdir.getmodulecol("""
            def test_pass(): pass
            def test_fail(): assert 0
        """)
        keyword_names = list(modcol.keywords)
        assert modcol.name in keyword_names
        for keyword in keyword_names:
            assert not keyword.startswith("_")
        assert modcol.name in repr(modcol.keywords)

    def test_issue345(self, testdir):
        """-k matching selects tests named after dunders (issue #345)."""
        testdir.makepyfile("""
            def test_should_not_be_selected():
                assert False, 'I should not have been selected to run'
            def test___repr__():
                pass
        """)
        reprec = testdir.inline_run("-k repr")
        reprec.assertoutcome(passed=1, failed=0)
| [
"holger@merlinux.eu"
] | holger@merlinux.eu |
ce3559546eb51aef224614fc873b34db20587221 | 9e13c5d52e0da95480af9eeb690e4f34f3c56739 | /lecture2/conditions.py | a7dcca8fc8ff88744e7fba35fa29b50c1c3227d7 | [] | no_license | kylehorton33/cs50webdev | 24ac71ea7084cd67ef7eb6f4444a57509500491e | 0852b971ccd51021b9a7a8304b259434acc1a7e3 | refs/heads/master | 2023-02-21T23:11:31.466501 | 2020-04-29T18:26:47 | 2020-04-29T18:26:47 | 255,963,260 | 0 | 0 | null | 2023-02-15T23:12:19 | 2020-04-15T15:42:33 | HTML | UTF-8 | Python | false | false | 107 | py | x = -28
if x > 0:
print("x is positive")
elif x < 0:
print("x is negative")
else:
print("x is zero") | [
"kylehorton33@gmail.com"
] | kylehorton33@gmail.com |
f11f188d17c63ae96e4f7b9adc85b020bca8843e | 2e609adb772399a08b21369f745503595d2a9761 | /JAN21_Codechef/DIVTHREE.py | 08bab7cd1930a8aed54beaa852961fdd8ca88d2b | [] | no_license | This-is-NB/CodeChef | 42c7380c364aa2d4009d088cafdc48f17e0a3be5 | 8f76be55d5d1d6ebe834f29f46a4be9dd2c904c7 | refs/heads/main | 2023-07-25T17:53:50.529439 | 2021-08-29T11:12:40 | 2021-08-29T11:12:40 | 401,021,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | for _ in range(int(input())):
n, k, d = map(int, input().split())
a = list(map(int, input().split()))
print("**", end="")
print(min(sum(a)//k, d))
| [
"57351822+This-is-NB@users.noreply.github.com"
] | 57351822+This-is-NB@users.noreply.github.com |
9ea9ef27a581e31b26cdfc98389f67fe44e661ac | 5346c931a5133ef036c7b5a89452bd0222b1cd92 | /SocialMedia/App/migrations/0047_auto_20200519_0121.py | 9631a4bf440728aa72013c54470779801214624b | [] | no_license | the-pythonists/letsChat | 7cac4af6c9b2e542f932be3f2715c4a8f7cbbc66 | df64d439ada9f5b9def2d566cd920a82d9fe3cab | refs/heads/master | 2022-11-14T14:11:57.085296 | 2020-07-01T18:07:59 | 2020-07-01T18:07:59 | 261,536,469 | 1 | 0 | null | 2020-05-29T17:49:20 | 2020-05-05T17:15:18 | HTML | UTF-8 | Python | false | false | 600 | py | # Generated by Django 3.0.3 on 2020-05-19 01:21
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an auto-populated ``date`` field to the Album and Photos models."""
    dependencies = [
        ('App', '0046_photos_album'),
    ]
    operations = [
        migrations.AddField(
            model_name='album',
            name='date',
            # default is the *callable* datetime.now, evaluated at save time.
            # NOTE(review): naive datetime - consider django.utils.timezone.now
            # if USE_TZ is enabled in settings.
            field=models.DateTimeField(blank=True, default=datetime.datetime.now),
        ),
        migrations.AddField(
            model_name='photos',
            name='date',
            field=models.DateTimeField(blank=True, default=datetime.datetime.now),
        ),
    ]
| [
"mohammad.danish2694@gmail.com"
] | mohammad.danish2694@gmail.com |
9f8718228467350e2f519935d6a0cced5617f4d2 | 7b2830cfeda670fb70cb09daed4db370cc393e04 | /python/the_best_from_blast.py | 694584fe0a32d744218d7a651a4cbce14e092c49 | [] | no_license | nanshanjin/script | 579d08e75cd5aa6e4bf4e558cefa581c2dc99f2e | c9fbfceca6c23ba21d908eaf91af5ec5fcdbecc3 | refs/heads/master | 2022-03-15T19:56:30.958094 | 2019-11-21T05:20:23 | 2019-11-21T05:20:23 | 53,825,691 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | import os,sys,re
def addtodict2(thedict, key_a, key_b, val):
if key_a in thedict:
thedict[key_a].update({key_b: val})
else:
thedict.update({key_a:{key_b: val}})
mydata=file("At.vs.GbA.1e-5.m8.blast")
dicts={}
dicts2={}
for line in mydata:
line=line.strip().split("\t")
dicts2[line[0]+line[11]]=line
#dicts.setdefault(line[0],[]).append(line[11])##see https://www.cnblogs.com/ywl925/p/3810598.html
dicts.setdefault(line[0],{})[line[1]]=line[11]
#print dicts
bestfile=open("best.txt","w")
for key1 in dicts:
#print dicts2[key1+max(dicts[key1])]
#bestfile.write("%s\t%s\n" % (key1,max(dicts[key1])))
#bestfile.write("\t".join(dicts2[key1+max(dicts[key1])])+"\n")
maxkey=max(dicts[key1], key=dicts[key1].get)
print key1,dicts[key1][maxkey]
| [
"nanshangogo@163.com"
] | nanshangogo@163.com |
05b60a337fe7a12315b91c9f03f05cbc27accb90 | 5e48579f65ab45469469a5cf0cbef82bf2323585 | /CovIdMX withREact/Covid19AcuteMx_Support_App/account_app/forms.py | 015094a334d599f574668d60ee456db90449a046 | [] | no_license | jayquake/React-Applications | 9e5786e4402dfe9f4e33d4daef657adb40bae1c8 | b9f27872846e7e8b7da94f77c2120755909db572 | refs/heads/master | 2022-12-30T04:45:11.591814 | 2020-10-11T13:29:05 | 2020-10-11T13:29:05 | 284,446,444 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,164 | py | from django import forms
from django.db import transaction
from .models import User, Subject, DoctorProfile, PatientProfile, EMTProfile
from django.contrib.auth.forms import UserCreationForm
from . import models
class PatientRegisterForm(UserCreationForm):
    """Registration form for patients.

    On save, flags the user as a patient, creates the linked
    ``PatientProfile`` and copies the selected ``interests`` onto it.
    """

    # optional multi-select of Subject interests, stored on the profile
    interests = forms.ModelMultipleChoiceField(
        queryset=Subject.objects.all(),
        widget=forms.CheckboxSelectMultiple,
        required=False
    )

    class Meta(UserCreationForm.Meta):
        model = User

    @transaction.atomic
    def save(self, commit=True):
        """Create the patient user plus profile inside one transaction.

        ``commit`` is accepted for signature compatibility with
        ``UserCreationForm.save`` (and consistency with
        ``DoctorRegisterForm.save``); when False, the unsaved user is
        returned and no profile is created.
        """
        user = super().save(commit=False)
        user.is_patient = True
        if not commit:
            return user
        user.save()
        patient = PatientProfile.objects.create(user=user)
        patient.interests.add(*self.cleaned_data.get('interests'))
        return user
class DoctorRegisterForm(UserCreationForm):
    """Registration form that marks the created user as a doctor."""

    class Meta(UserCreationForm.Meta):
        model = User

    def save(self, commit=True):
        """Flag the new user as a doctor before (optionally) saving it."""
        doctor = super().save(commit=False)
        doctor.is_doctor = True
        if commit:
            doctor.save()
        return doctor
class PatientUpdateFrom(forms.ModelForm):
    """Edit a patient's account fields (username and email).

    NOTE(review): the class name is misspelled ("From" instead of "Form");
    renaming would break existing imports, so it is left as-is.
    """
    class Meta:
        model = User
        fields = ['username', 'email']
class PatientProfileUpdateForm(forms.ModelForm):
    """Edit a patient profile; only the avatar image is exposed."""
    class Meta:
        model = PatientProfile
        fields = ['image']
class DoctorUpdateFrom(forms.ModelForm):
    """Edit a doctor's account fields (name and email).

    NOTE(review): name misspelled ("From") like PatientUpdateFrom; kept
    for import compatibility.
    """
    class Meta:
        model = User
        fields = ['first_name', 'last_name', 'email']
class DoctorProfileUpdateForm(forms.ModelForm):
    """Edit a doctor profile (image, about_me, resume).

    NOTE(review): the field/widget attributes declared *inside* ``Meta``
    below (email, first_name, last_name, about_me, resume, job_title) are
    dead code - Django only reads recognized Meta options (model, fields,
    widgets, ...), so these have no effect. To add form fields they must be
    declared on the class body, not on Meta.
    """
    class Meta:
        model = models.DoctorProfile
        email = forms.EmailField()
        first_name = forms.CharField(max_length=50)
        last_name = forms.CharField(max_length=50)
        about_me = forms.Textarea()
        resume = forms.FileInput
        job_title = forms.ChoiceField
        fields = ['image', 'about_me', 'resume']
# class patientRegisterForm(UserCreationForm):
# USER_SCHOOL_CHOICES = ((1, 'High School'),
# (2, 'Some College'),
# (3, 'Associates Degree'),
# (4, 'Bachelors Degree'),
# (5, 'Masters Degree'),
# (6, 'Other'),
# )
# email = forms.EmailField()
# first_name = forms.CharField(max_length=50)
# last_name = forms.CharField(max_length=50)
# academics = forms.Select(choices=USER_SCHOOL_CHOICES)
#
# class Meta:
# model = User
# fields = ['first_name', 'last_name', 'username', 'email', 'password1', 'password2']
#
#
# class DoctorRegisterForm(UserCreationForm):
# USER_Grade_Taught_CHOICES = ((1, 'Kindergarten'),
# (2, 'first grade '),
# (3, 'second grade '),
# (4, 'third grade'),
# (5, 'Fourth Grade'),
# (6, 'Fifth Grade'),
# (7, 'Sixth Grade'),
# (8, 'Seventh Grade'),
# (9, 'Eighth Grade'),
# (10, 'Ninth Grade'),
# (11, ' Grade'),
# )
# email = forms.EmailField()
# first_name = forms.CharField(max_length=50)
# last_name = forms.CharField(max_length=50)
# highest_education_level = forms.Select()
# grade_taught = forms.SelectMultiple(USER_Grade_Taught_CHOICES)
#
# class Meta:
# model = User
# form_class = DoctorRegisterForm
# template_name = 'registration/signup_form.html'
# fields = ['first_name', 'last_name', 'username', 'email', 'password1', 'password2',]
#
#
# def get_context_data(self, **kwargs):
# kwargs['user_type'] = 'Doctor'
# return super().get_context_data(**kwargs)
#
#
#
#
#
#
# email = forms.EmailField()
# first_name = forms.CharField(max_length=50)
# last_name = forms.CharField(max_length=50)
# about_me = forms.Textarea(max_length=150)
# resume = forms.FileInput
# job_title = forms.ChoiceField
# languages = forms.LanguageField(max_length=8, blank=True)
# region = forms.RegionField(blank=True) | [
"jayquake@gmail.com"
] | jayquake@gmail.com |
3f6521bf8afa7ea008e775bbaf415bf9906cbac2 | 97aac8c77fb7f2ad9bf8bfd24c320ba08491f723 | /csv_quote_game.py | 007513171c1681e87b54e63b7811ae0c4a0606dd | [] | no_license | agparkes/python_scraping_game_app | ac3800f56bfe2baf29e4657e15cc87745fd3b240 | 16f9c5002bc5e799157880c9e0a11a29d7513361 | refs/heads/master | 2022-06-16T14:09:08.767821 | 2020-05-12T00:44:12 | 2020-05-12T00:44:12 | 261,931,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,078 | py | # http://quotes.toscrape.com
import requests
from bs4 import BeautifulSoup
from random import choice
from csv import DictReader
BASE_URL = "http://quotes.toscrape.com"
def read_quotes(filename):
    """Load quotes from a CSV file.

    Each data row becomes a dict keyed by the CSV header row
    (e.g. ``text``, ``author``, ``bio-link``).

    Args:
        filename: Path to the CSV file.

    Returns:
        list[dict]: One dict per data row.
    """
    # newline="" is required by the csv module so quoted fields containing
    # newlines are parsed correctly; encoding pinned for portability.
    with open(filename, "r", newline="", encoding="utf-8") as file:
        return list(DictReader(file))
def start_game(quotes):
    """Run one round of the guess-the-author game on a random quote.

    Interactive: reads guesses from stdin, prints hints via print_hint(),
    and recurses to play again when the player answers yes.
    """
    quote = choice(quotes)
    remaining_guesses = 4
    print("Here's a quote: ")
    print(quote["text"])
    # NOTE(review): this prints the answer before the player guesses -
    # looks like leftover debug output; confirm whether it should be removed.
    print(quote["author"])
    guess = ""
    while guess.lower() != quote["author"].lower() and remaining_guesses > 0:
        guess = input(f"Who is the author of this quote? You have {remaining_guesses} guesses remaining: ")
        if guess.lower() == quote["author"].lower():
            print("YEAH, YOU GOT IS RIGHT!!!")
            break
        remaining_guesses -= 1
        # each wrong answer burns a guess and reveals a progressively
        # stronger hint (see print_hint)
        print_hint(quote, remaining_guesses)
    again = ""
    # loop until the player gives a recognized yes/no answer
    while again.lower() not in ("y", "yes", "yeah", "n", "no", "nop"):
        again = input(f"Would you like to continue playing (y/n)?: ")
    if again.lower() in ("y", "yes", "yeah"):
        # NOTE: recursion - fine for casual play, would hit the recursion
        # limit only after ~1000 consecutive rounds
        return start_game(quotes)
    else:
        print("Thanks for playing!!!")
def print_hint(quote, remaining_guesses):
    """Print a hint whose strength depends on how many guesses remain.

    3 left: scrape the author's birth date/place from their bio page
    (network request); 2 left: first-name initial; 1 left: last-name
    initial; 0 left: reveal the answer.
    """
    if remaining_guesses == 3:
        # fetch the author's bio page linked from the quote row
        res = requests.get(f"{BASE_URL}{quote['bio-link']}")
        soup = BeautifulSoup(res.text, "html.parser")
        birth_date = soup.find(class_= "author-born-date").get_text()
        birth_location = soup.find(class_= "author-born-location").get_text()
        print(f"Here's a hint: The author was born on {birth_date}, {birth_location}")
    elif remaining_guesses == 2:
        print(f"Here's another hint: The author's first name starts with {quote['author'][0]}: ")
    elif remaining_guesses == 1:
        # NOTE(review): split(" ")[1] raises IndexError for single-word
        # author names (e.g. "Voltaire") - confirm the data never has those.
        author_lastname_initial = quote["author"].split(" ")[1][0]
        print(f"Here's the final hint: The author's last name starts with {author_lastname_initial}: ")
    else:
        print(f"Sorry, you ran out of guesses. The answer is: {quote['author']}")
if __name__ == "__main__":
    # Run the interactive game only when executed as a script, so importing
    # this module (e.g. to reuse read_quotes) has no side effects.
    quotes = read_quotes("quotes.csv")
    start_game(quotes)
"agparkes413@gmail.com"
] | agparkes413@gmail.com |
b56c667458fbe3f787e21688fed454e706fab48a | 64f4ae3ceec4335e11800c1ffd12d0f651cba6e3 | /codes/graphs.py | 274d806807eff4988514bf761de73bbf1fc8326f | [] | no_license | THUNLP-AIPoet/WMPoetry | b83c922c6a4bbbeb673a69102efbbe6d0b8b9aa3 | d88c7d4e49b1e48d6c23f9cda12b8dbef59beac1 | refs/heads/master | 2022-10-08T03:22:30.377450 | 2020-06-11T15:48:03 | 2020-06-11T15:48:03 | 271,443,952 | 38 | 14 | null | null | null | null | UTF-8 | Python | false | false | 18,392 | py | # -*- coding: utf-8 -*-
# @Author: Xiaoyuan Yi
# @Last Modified by: Xiaoyuan Yi
# @Last Modified time: 2020-06-11 20:16:17
# @Email: yi-xy16@mails.tsinghua.edu.cn
# @Description:
'''
Copyright 2020 THUNLP Lab. All Rights Reserved.
This code is part of the online Chinese poetry generation system, Jiuge.
System URL: https://jiuge.thunlp.cn/ and https://jiuge.thunlp.org/.
Github: https://github.com/THUNLP-AIPoet.
'''
import random
from itertools import chain
import torch
from torch import nn
import torch.nn.functional as F
from layers import BidirEncoder, Decoder, MLP, ContextLayer, AttentionReader, AttentionWriter
def get_non_pad_mask(seq, pad_idx, device):
    """Return a float mask of shape (B, L): 1.0 where seq != pad_idx, else 0.0."""
    assert seq.dim() == 2
    non_pad = (seq != pad_idx).type(torch.float)
    return non_pad.to(device)
def get_seq_length(seq, pad_idx, device):
    """Count non-padding positions per row; returns a long tensor of shape (B,)."""
    non_pad = get_non_pad_mask(seq, pad_idx, device)
    return non_pad.sum(dim=-1).long()
class WorkingMemoryModel(nn.Module):
    """Working-memory model for line-by-line Chinese poetry generation.

    At every decoding step a single attention reader queries three memories:
      * topic memory   - compressed encodings of the user keywords,
      * history memory - slots written from previously generated lines,
      * local memory   - encodings of the immediately preceding line.
    Two trace vectors summarize progress: ``global_trace`` (what has been
    generated so far) and ``topic_trace`` (which topic slots were read).
    """
    def __init__(self, hps, device):
        """Build all sub-modules from the hyper-parameter namespace ``hps``."""
        super(WorkingMemoryModel, self).__init__()
        self.hps = hps
        self.device = device
        self.global_trace_size = hps.global_trace_size
        self.topic_trace_size = hps.topic_trace_size
        self.topic_slots = hps.topic_slots
        self.his_mem_slots = hps.his_mem_slots
        self.vocab_size = hps.vocab_size
        self.mem_size = hps.mem_size
        self.sens_num = hps.sens_num
        self.pad_idx = hps.pad_idx
        self.bos_tensor = torch.tensor(hps.bos_idx, dtype=torch.long, device=device)
        # ----------------------------
        # build components
        self.layers = nn.ModuleDict()
        self.layers['word_embed'] = nn.Embedding(hps.vocab_size,
            hps.word_emb_size, padding_idx=hps.pad_idx)
        # NOTE: We set fixed 33 phonology categories: 0~32
        #   please refer to preprocess.py for more details
        self.layers['ph_embed'] = nn.Embedding(33, hps.ph_emb_size)
        self.layers['len_embed'] = nn.Embedding(hps.sen_len, hps.len_emb_size)
        self.layers['encoder'] = BidirEncoder(hps.word_emb_size, hps.hidden_size, drop_ratio=hps.drop_ratio)
        self.layers['decoder'] = Decoder(hps.hidden_size, hps.hidden_size, drop_ratio=hps.drop_ratio)
        # project the decoder hidden state to a vocabulary-size output logit
        self.layers['out_proj'] = nn.Linear(hps.hidden_size, hps.vocab_size)
        # update the context (global trace) vector
        self.layers['global_trace_updater'] = ContextLayer(hps.hidden_size, hps.global_trace_size)
        self.layers['topic_trace_updater'] = MLP(self.mem_size+self.topic_trace_size,
            layer_sizes=[self.topic_trace_size], activs=['tanh'], drop_ratio=hps.drop_ratio)
        # MLP to calculate the initial decoder state
        self.layers['dec_init'] = MLP(hps.hidden_size*2, layer_sizes=[hps.hidden_size],
            activs=['tanh'], drop_ratio=hps.drop_ratio)
        self.layers['key_init'] = MLP(hps.hidden_size*2, layer_sizes=[hps.hidden_size],
            activs=['tanh'], drop_ratio=hps.drop_ratio)
        # history memory reading and writing layers
        # query: concatenation of hidden state, global_trace and topic_trace
        self.layers['memory_read'] = AttentionReader(
            d_q=hps.hidden_size+self.global_trace_size+self.topic_trace_size+self.topic_slots,
            d_v=hps.mem_size, drop_ratio=hps.attn_drop_ratio)
        self.layers['memory_write'] = AttentionWriter(hps.mem_size+self.global_trace_size, hps.mem_size)
        # NOTE: a layer to compress the encoder hidden states to a smaller size for larger number of slots
        self.layers['mem_compress'] = MLP(hps.hidden_size*2, layer_sizes=[hps.mem_size],
            activs=['tanh'], drop_ratio=hps.drop_ratio)
        # input to merge_x: [inp, attns, ph_inp, len_inp, global_trace]
        self.layers['merge_x'] = MLP(
            hps.word_emb_size+hps.ph_emb_size+hps.len_emb_size+hps.global_trace_size+hps.mem_size,
            layer_sizes=[hps.hidden_size],
            activs=['tanh'], drop_ratio=hps.drop_ratio)
        # two annealing parameters
        self._tau = 1.0
        self._teach_ratio = 0.8
        # ---------------------------------------------------------
        # only used for pre-training (dseq_graph)
        self.layers['dec_init_pre'] = MLP(hps.hidden_size*2,
            layer_sizes=[hps.hidden_size],
            activs=['tanh'], drop_ratio=hps.drop_ratio)
        self.layers['merge_x_pre'] = MLP(
            hps.word_emb_size+hps.ph_emb_size+hps.len_emb_size,
            layer_sizes=[hps.hidden_size],
            activs=['tanh'], drop_ratio=hps.drop_ratio)
    #---------------------------------
    def set_tau(self, tau):
        # anneal the Gumbel temperature of the memory writer; out-of-range
        # values are silently ignored
        if 0.0 < tau <= 1.0:
            self.layers['memory_write'].set_tau(tau)
    def get_tau(self):
        return self.layers['memory_write'].get_tau()
    def set_teach_ratio(self, teach_ratio):
        # probability of teacher forcing during training; ignored if not in (0, 1]
        if 0.0 < teach_ratio <= 1.0:
            self._teach_ratio = teach_ratio
    def get_teach_ratio(self):
        return self._teach_ratio
    def set_null_idxes(self, null_idxes):
        # token indices (e.g. stop words) used to build the "null" memory slot
        self.null_idxes = null_idxes.to(self.device).unsqueeze(0)
    #---------------------------------
    def compute_null_mem(self, batch_size):
        """Build self.null_mem, a (B, 1, D) slot the writer may choose to ignore."""
        # we initialize the null memory slot with an average of stop words
        # by supposing that the model could learn to ignore these words
        emb_null = self.layers['word_embed'](self.null_idxes)
        # (1, L, 2*H)
        enc_outs, _ = self.layers['encoder'](emb_null)
        # (1, L, 2 * H) -> (1, L, D)
        null_mem = self.layers['mem_compress'](enc_outs)
        # (1, L, D)->(1, 1, D)->(B, 1, D)
        self.null_mem = null_mem.mean(dim=1, keepdim=True).repeat(batch_size, 1, 1)
    def computer_topic_memory(self, keys):
        """Encode keyword sequences into (topic_mem, topic_mask, key_init_state).

        NOTE(review): "computer_" in the method name looks like a typo for
        "compute_"; kept since external callers may rely on it.
        """
        # (B, key_len)
        emb_keys = [self.layers['word_embed'](key) for key in keys]
        key_lens = [get_seq_length(key, self.pad_idx, self.device) for key in keys]
        batch_size = emb_keys[0].size(0)
        # length == 0 means this is an empty topic slot
        topic_mask = torch.zeros(batch_size, self.topic_slots,
            dtype=torch.float, device=self.device).bool() # (B, topic_slots)
        for step in range(0, self.topic_slots):
            topic_mask[:, step] = torch.eq(key_lens[step], 0)
        key_states_vec, topic_slots = [], []
        for step, (emb_key, length) in enumerate(zip(emb_keys, key_lens)):
            # we set the length of empty keys to 1 for parallel processing,
            #   which will be masked then for memory reading
            length.masked_fill_(length.eq(0), 1)
            _, state = self.layers['encoder'](emb_key, length)
            # (2, B, H) -> (B, 2, H) -> (B, 2*H)
            key_state = state.transpose(0, 1).contiguous().view(batch_size, -1)
            mask = (1 - topic_mask[:, step].float()).unsqueeze(1) # (B, 1)
            key_states_vec.append((key_state*mask).unsqueeze(1))
            topic = self.layers['mem_compress'](key_state)
            topic_slots.append((topic*mask).unsqueeze(1))
        # (B, topic_slots, mem_size)
        topic_mem = torch.cat(topic_slots, dim=1)
        # (B, H): initial decoder state built from the sum of key states
        key_init_state = self.layers['key_init'](
            torch.cat(key_states_vec, dim=1).sum(1))
        return topic_mem, topic_mask, key_init_state
    def computer_local_memory(self, inps, with_length):
        """Encode the previous line into (local_mem, local_mask, init_state)."""
        batch_size = inps.size(0)
        if with_length:
            length = get_seq_length(inps, self.pad_idx, self.device)
        else:
            length = None
        emb_inps = self.layers['word_embed'](inps)
        # outs: (B, L, 2 * H)
        # states: (2, B, H)
        enc_outs, enc_states = self.layers['encoder'](emb_inps, length)
        init_state = self.layers['dec_init'](enc_states.transpose(0, 1).
            contiguous().view(batch_size, -1))
        # (B, L, 2 * H) -> (B, L, D)
        local_mem = self.layers['mem_compress'](enc_outs)
        local_mask = torch.eq(inps, self.pad_idx)
        return local_mem, local_mask, init_state
    def update_global_trace(self, old_global_trace, dec_states, dec_mask):
        """Fold the decoder states of the finished line into the global trace."""
        states = torch.cat(dec_states, dim=2) # (B, H, L)
        global_trace = self.layers['global_trace_updater'](
            old_global_trace, states*(dec_mask.unsqueeze(1)))
        return global_trace
    def update_topic_trace(self, topic_trace, topic_mem, concat_aligns):
        """Update the topic trace with how much each topic slot was read."""
        # topic_trace: (B, topic_trace_size+topic_slots)
        # concat_aligns: (B, L_gen, mem_slots)
        # 1: topic memory, 2: history memory 3: local memory
        topic_align = concat_aligns[:, :, 0:self.topic_slots].mean(dim=1) # (B, topic_slots)
        # (B, topic_slots, mem_size) * (B, topic_slots, 1) -> (B, topic_slots, mem_size)
        #   -> (B, mem_size)
        topic_used = torch.mul(topic_mem, topic_align.unsqueeze(2)).mean(dim=1)
        new_topic_trace = self.layers['topic_trace_updater'](
            torch.cat([topic_trace[:, 0:self.topic_trace_size], topic_used], dim=1))
        # running log of per-slot read strength, appended after the trace vector
        read_log = topic_trace[:, self.topic_trace_size:] + topic_align
        fin_topic_trace = torch.cat([new_topic_trace, read_log], dim=1)
        return fin_topic_trace
    def dec_step(self, inp, state, ph, length, total_mem, total_mask,
        global_trace, topic_trace):
        """One decoding step: read memory, merge inputs, advance the decoder."""
        emb_inp = self.layers['word_embed'](inp)
        emb_ph = self.layers['ph_embed'](ph)
        emb_len = self.layers['len_embed'](length)
        # query for reading the memories
        # (B, 1, H]
        query = torch.cat([state, global_trace, topic_trace], dim=1).unsqueeze(1)
        # attns: (B, 1, mem_size), align: (B, 1, L)
        attns, align = self.layers['memory_read'](query, total_mem, total_mem, total_mask)
        x = torch.cat([emb_inp, emb_ph, emb_len, attns, global_trace], dim=1).unsqueeze(1)
        x = self.layers['merge_x'](x)
        cell_out, new_state = self.layers['decoder'](x, state)
        out = self.layers['out_proj'](cell_out)
        return out, new_state, align
    def run_decoder(self, inps, trgs, phs, lens, key_init_state,
        history_mem, history_mask, topic_mem, topic_mask, global_trace, topic_trace,
        specified_teach_ratio):
        """Decode one full line and update history memory plus both traces."""
        local_mem, local_mask, init_state = \
            self.computer_local_memory(inps, key_init_state is None)
        # for the first line there is no previous line; start from the keys
        if key_init_state is not None:
            init_state = key_init_state
        if specified_teach_ratio is None:
            teach_ratio = self._teach_ratio
        else:
            teach_ratio = specified_teach_ratio
        # Note this order: 1: topic memory, 2: history memory 3: local memory
        total_mask = torch.cat([topic_mask, history_mask, local_mask], dim=1)
        total_mem = torch.cat([topic_mem, history_mem, local_mem], dim=1)
        batch_size = inps.size(0)
        trg_len = trgs.size(1)
        outs = torch.zeros(batch_size, trg_len, self.vocab_size,
            dtype=torch.float, device=self.device)
        state = init_state
        inp = self.bos_tensor.repeat(batch_size)
        dec_states, attn_weights = [], []
        # generate each line
        for t in range(0, trg_len):
            out, state, align = self.dec_step(inp, state, phs[:, t],
                lens[:, t], total_mem, total_mask, global_trace, topic_trace)
            outs[:, t, :] = out
            attn_weights.append(align)
            # teach force with a probability
            is_teach = random.random() < teach_ratio
            if is_teach or (not self.training):
                inp = trgs[:, t]
            else:
                normed_out = F.softmax(out, dim=-1)
                inp = normed_out.data.max(1)[1]
            dec_states.append(state.unsqueeze(2)) # (B, H, 1)
            # NOTE(review): ``align`` is appended a second time here (also
            # appended at the top of this loop body), so concat_aligns below
            # contains every alignment twice - looks unintended; confirm.
            attn_weights.append(align)
        # write the history memory (only when this line was not key-initiated)
        if key_init_state is None:
            new_history_mem, _ = self.layers['memory_write'](history_mem, local_mem,
                1.0-local_mask.float(), global_trace, self.null_mem)
        else:
            new_history_mem = history_mem
        # (B, L)
        dec_mask = get_non_pad_mask(trgs, self.pad_idx, self.device)
        # update global trace vector
        new_global_trace = self.update_global_trace(global_trace, dec_states, dec_mask)
        # update topic trace vector
        # attn_weights: (B, 1, all_mem_slots) * L_gen
        concat_aligns = torch.cat(attn_weights, dim=1)
        new_topic_trace = self.update_topic_trace(topic_trace, topic_mem, concat_aligns)
        return outs, new_history_mem, new_global_trace, new_topic_trace
    def initialize_mems(self, keys):
        """Build zeroed memories/traces and the topic memory for a new poem."""
        batch_size = keys[0].size(0)
        topic_mem, topic_mask, key_init_state = self.computer_topic_memory(keys)
        history_mem = torch.zeros(batch_size, self.his_mem_slots, self.mem_size,
            dtype=torch.float, device=self.device)
        # default: True, masked (history starts empty)
        history_mask = torch.ones(batch_size, self.his_mem_slots,
            dtype=torch.float, device=self.device).bool()
        global_trace = torch.zeros(batch_size, self.global_trace_size,
            dtype=torch.float, device=self.device)
        topic_trace = torch.zeros(batch_size, self.topic_trace_size+self.topic_slots,
            dtype=torch.float, device=self.device)
        self.compute_null_mem(batch_size)
        return topic_mem, topic_mask, history_mem, history_mask,\
            global_trace, topic_trace, key_init_state
    def rebuild_inps(self, ori_inps, last_outs, teach_ratio):
        """Mix ground-truth and generated tokens as the next line's input."""
        # ori_inps: (B, L)
        # last_outs: (B, L, V)
        inp_len = ori_inps.size(1)
        new_inps = torch.ones_like(ori_inps) * self.pad_idx
        mask = get_non_pad_mask(ori_inps, self.pad_idx, self.device).long()
        if teach_ratio is None:
            teach_ratio = self._teach_ratio
        for t in range(0, inp_len):
            is_teach = random.random() < teach_ratio
            if is_teach or (not self.training):
                new_inps[:, t] = ori_inps[:, t]
            else:
                normed_out = F.softmax(last_outs[:, t], dim=-1)
                new_inps[:, t] = normed_out.data.max(1)[1]
        # keep padding positions as pad_idx
        new_inps = new_inps * mask
        return new_inps
    def forward(self, all_inps, all_trgs, all_ph_inps, all_len_inps, keys, teach_ratio=None,
        flexible_inps=False):
        '''
        all_inps: (B, L) * sens_num
        all_trgs: (B, L) * sens_num
        all_ph_inps: (B, L) * sens_num
        all_len_inps: (B, L) * sens_num
        keys: (B, L) * topic_slots
        flexible_inps: if apply partial teaching force to local memory.
            False: the ground-truth src line is stored into the local memory
            True: for local memory, ground-truth characters will be replaced with generated characters with
                the probability of 1- teach_ratio.
            NOTE: this trick is *not* adopted in our original paper, which could lead to
                better BLEU and topic relevance, but worse diversity of generated poems.
        '''
        all_outs = []
        topic_mem, topic_mask, history_mem, history_mask,\
            global_trace, topic_trace, key_init_state = self.initialize_mems(keys)
        for step in range(0, self.sens_num):
            # only the first line is initialized from the keywords
            if step > 0:
                key_init_state = None
            if step >= 1 and flexible_inps:
                inps = self.rebuild_inps(all_inps[step], all_outs[-1], teach_ratio)
            else:
                inps = all_inps[step]
            outs, history_mem, global_trace, topic_trace \
                = self.run_decoder(inps, all_trgs[step],
                    all_ph_inps[step], all_len_inps[step], key_init_state,
                    history_mem, history_mask, topic_mem, topic_mask,
                    global_trace, topic_trace, teach_ratio)
            # unmask history slots that have been written (non-zero rows)
            if step >= 1:
                history_mask = history_mem.abs().sum(-1).eq(0) # (B, mem_slots)
            all_outs.append(outs)
        return all_outs
    # --------------------------
    # graphs for pre-training
    def dseq_graph(self, inps, trgs, ph_inps, len_inps, teach_ratio=None):
        """Pre-training forward pass: plain denoising Seq2Seq, no memories."""
        # pre-train the encoder and decoder as a denoising Seq2Seq model
        batch_size, trg_len = trgs.size(0), trgs.size(1)
        length = get_seq_length(inps, self.pad_idx, self.device)
        emb_inps = self.layers['word_embed'](inps)
        emb_phs = self.layers['ph_embed'](ph_inps)
        emb_lens = self.layers['len_embed'](len_inps)
        # outs: (B, L, 2 * H)
        # states: (2, B, H)
        _, enc_states = self.layers['encoder'](emb_inps, length)
        init_state = self.layers['dec_init_pre'](enc_states.transpose(0, 1).
            contiguous().view(batch_size, -1))
        outs = torch.zeros(batch_size, trg_len, self.vocab_size,
            dtype=torch.float, device=self.device)
        if teach_ratio is None:
            teach_ratio = self._teach_ratio
        state = init_state
        inp = self.bos_tensor.repeat(batch_size, 1)
        # generate each line
        for t in range(0, trg_len):
            emb_inp = self.layers['word_embed'](inp)
            x = self.layers['merge_x_pre'](torch.cat(
                [emb_inp, emb_phs[:, t].unsqueeze(1), emb_lens[:, t].unsqueeze(1)],
                dim=-1))
            cell_out, state, = self.layers['decoder'](x, state)
            out = self.layers['out_proj'](cell_out)
            outs[:, t, :] = out
            # teach force with a probability
            is_teach = random.random() < teach_ratio
            if is_teach or (not self.training):
                inp = trgs[:, t].unsqueeze(1)
            else:
                normed_out = F.softmax(out, dim=-1)
                top1 = normed_out.data.max(1)[1]
                inp = top1.unsqueeze(1)
        return outs
    # ----------------------------------------------
    def dseq_parameter_names(self):
        # layers updated during pre-training; the memory/trace layers are excluded
        required_names = ['word_embed', 'ph_embed', 'len_embed',
            'encoder', 'decoder', 'out_proj',
            'dec_init_pre', 'merge_x_pre']
        return required_names
    def dseq_parameters(self):
        """Yield only the parameters trained by the pre-training graph."""
        names = self.dseq_parameter_names()
        required_params = [self.layers[name].parameters() for name in names]
        return chain.from_iterable(required_params)
# ------------------------------------------------ | [
"mtmoonyi@gmail.com"
] | mtmoonyi@gmail.com |
1a7048886021c154c279d665d513e857da759255 | 95e9ec4b3b0d86063da53a0e62e138cf794cce3a | /webroot/dqb/dqb/base/myjson.py | e6843b72d13a46c5fb705787579293028f384d2f | [] | no_license | wjl626nice/1902 | c3d350d91925a01628c9402cbceb32ebf812e43c | 5a1a6dd59cdd903563389fa7c73a283e8657d731 | refs/heads/master | 2023-01-05T23:51:47.667675 | 2019-08-19T06:42:09 | 2019-08-19T06:42:09 | 180,686,044 | 4 | 1 | null | 2023-01-04T07:35:24 | 2019-04-11T00:46:43 | Python | UTF-8 | Python | false | false | 1,822 | py | from django.shortcuts import HttpResponse
from rest_framework.renderers import JSONRenderer
from base.errcode import err_number
class JSONResponse(HttpResponse):
    """HttpResponse subclass that returns a JSON envelope.

    Envelope shape: ``{"code": <int>, "msg": <text from err_number>,
    "total": <int>, ["count": <int>,] "data": <rendered payload>}``.
    A non-zero ``code`` signals an error and the data array is emptied.

    NOTE(review): the body is assembled by string concatenation, so a
    ``msg`` containing a double quote would produce invalid JSON; consider
    building a dict and serializing with ``json.dumps`` instead.
    """
    def __init__(self,code,data='',total=1,count=-1,**kwargs):
        """Render ``data`` with DRF's JSONRenderer and wrap it in the envelope.

        Args:
            code: 0 for success; any other value selects an error message
                from ``err_number`` and drops the payload.
            data: payload serialized by JSONRenderer.
            total: total record count reported to the client.
            count: page/record count; omitted from the body when negative.
        """
        kwargs['content_type'] = 'application/json'
        try:
            content = JSONRenderer().render(data)
            if code:
                # error: payload replaced by an empty list
                content = '{"code":' \
                          + str(code) \
                          + ',"msg":"' \
                          + err_number[str(code)] \
                          + '","data":[]}'
            else:
                if count < 0:
                    content = '{"code":'\
                              +str(code)\
                              +',"msg":"'\
                              +err_number[str(code)]\
                              +'","total":'\
                              +str(total)\
                              +',"data":'\
                              +str(content,encoding="utf-8")\
                              +'}'
                else:
                    content = '{"code":' \
                              + str(code) \
                              + ',"msg":"' \
                              + err_number[str(code)] \
                              + '","total":' \
                              + str(total) \
                              + ',"count":' \
                              + str(count) \
                              + ',"data":' \
                              + str(content, encoding="utf-8") \
                              + '}'
        # narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are not swallowed; rendering/lookup failures fall back to code -1
        except Exception:
            content = '{"code":' \
                      + '-1' \
                      + ',"msg":"返回有误","data":[]}'
        super(JSONResponse, self).__init__(content, **kwargs)
"18537160262@qq.com"
] | 18537160262@qq.com |
500f911b398fe52f8e014570e7384f3c7ded6399 | c658ecefe15376237f8ea23adf186a14f2efa3a6 | /pre_config/initial_config/initial_config.py | 566798d321c1b80c00b0482195b8f749f55c23a0 | [] | no_license | mart82/Cloud-Services | b5ff7af061c8bc380da39c2b95fe6f215d53181f | ad14232b37ccd75c7214c72b9ebb39e95461c62a | refs/heads/master | 2021-05-11T12:36:39.941456 | 2018-01-26T15:30:47 | 2018-01-26T15:30:47 | 117,662,249 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 356 | py | from jnpr.junos import Device
from jnpr.junos.utils.config import Config
import yaml
device=Device (host="10.1.1.1", user="root", password="Jun05!")
device.open()
cfg = Config(device)
cfg.rollback()
s=open('test-startup.yml').read()
myvars=yaml.load(s)
cfg.load(template_path='test-startup.j2', template_vars=myvars, format='set')
cfg.pdiff()
cfg.commit()
| [
"noreply@github.com"
] | noreply@github.com |
45529f80985c513bb14b4d7606d761fca5dbb3da | ccf136bcae86a86ea06950b608a2f1560e75ad3f | /frida_test/stringhook.py | 3b448a42bf5c78e2a4b6c3830d582759f53054ed | [] | no_license | mr6r4y/re-test | ebdbb9ede5c6ba59751b502b2a917dff9d218aac | 29756093f6ca869b9527ea650d3ac6a1e60a07e9 | refs/heads/master | 2020-12-30T15:20:57.253567 | 2020-12-12T17:53:39 | 2020-12-12T17:53:39 | 91,121,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | from __future__ import print_function
import frida
import sys
session = frida.attach("hi")
script = session.create_script("""
var st = Memory.allocUtf8String("TESTMEPLZ!");
var f = new NativeFunction(ptr("%s"), 'int', ['pointer']);
// In NativeFunction param 2 is the return value type,
// and param 3 is an array of input types
f(st);
""" % int(sys.argv[1], 16))
script.load() | [
"mr6r4y@gmail.com"
] | mr6r4y@gmail.com |
2395a6322bb5d6721661f897482429f74cbc7fb7 | 67308192424d52fa6f795cebed7c64411619768b | /oop/magicMethod.py | 3b1196bc07c29f23f7dece24609f2708fa86e113 | [] | no_license | s654632396/learnPython | 4cea82b1eb4af47857f0b27f03e441380242550e | b170a5137e7773d14248137fb4a0dfeecc483af5 | refs/heads/master | 2021-01-20T07:43:09.444182 | 2017-05-04T15:23:39 | 2017-05-04T15:23:39 | 90,036,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,996 | py | # coding=utf-8
# 特殊方法 / 魔术方法
# 用于 print 的 __str__
# 用于 len 的 __len__
# 用于 cmp 的 __cmp__
# ...
# 定义在class 中
# 不需要直接调用
# 有关联性的特殊方法都必须实现,如:
# __getattr__
# __setattr__
# __delattr__
# __str__ & __repr__
# 将一个类的实例变成 str, 需要实现特殊方法 __str__()
class Person(object):
def __init__(self, name):
self.name = name
def __str__(self):
return "(Object- Person: %s)" % (self.name)
__repr__ = __str__
p = Person('akarin')
print p
class Student(Person):
def __init__(self, name, gender, score):
Person.__init__(self, name)
self.gender = gender
self.score = score
def __str__(self):
return "<%s: %s,%s,%s>" % ('Student', self.name, self.gender, self.score)
__repr__ = __str__
def __cmp__(self, other):
if self.score > other.score:
return 1
elif self.score == other.score:
return 0
else:
return -1
s = Student('kongo', 'female', 94)
print s
print s.__repr__()
# __cmp__ 定义对象的排序
L = [("kongo", "female", 94), ("haruna", "female", 100), ("bismarck", "female", 93), ]
Ls = [Student(x, y, z) for x, y, z in L]
print Ls
Ls = sorted(Ls)
print Ls
# __len__
class Ships(object):
def __init__(self, *ars):
self.lists = {}
for index, name in enumerate(ars):
self.lists[index] = name
def __len__(self):
return len(self.lists)
s = Ships('haruna', 'kongo', 'yamato', 'shimakaze')
print s.lists
print len(s)
class Fib(object) :
def __init__(self, num):
a, b, L = 0, 1, []
for n in range(num) :
L.append(a)
a, b = b, a + b
self.numbers = L
def __str__(self):
return str(self.numbers)
def __len__(self):
return len(self.numbers)
__repr__ = __str__
f = Fib(10)
print f
print len(f)
| [
"s654632396@hotmail.com"
] | s654632396@hotmail.com |
07c8d6739a953f5ee1ed1512aa96f4affe98278d | c557afad19bec111e5dbc128f7e64a97997519ad | /[yuya]/bin/pip3 | 09dc3292d9d9db960b951653f583d8e4882467ec | [] | no_license | yuyayoshidatokyo/Aidemytraining | 4bb39f7522cd81d4b54f68060e61f8c944497308 | dae343843e47be4b63399b33625371936aefaf96 | refs/heads/master | 2020-03-20T13:43:44.353288 | 2018-06-15T08:59:10 | 2018-06-15T08:59:10 | 137,464,641 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | #!/Users/yuya/Desktop/[yuya]/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"a15.r5th@g.chuo-u.ac.jp"
] | a15.r5th@g.chuo-u.ac.jp | |
433d7d3faf281d73df9a7ed24bac6ed44a72b96c | f6cae515a4718b9f5165f739366baebf7bb50de3 | /hazard_feed/migrations/0007_emailtemplates.py | 2e0c69484a4404e82a7c8daff6aad12feaf1ecc6 | [] | no_license | hitnik/pogoda_feed | 1781a9a1a7d1fb7259e0711ac2778881a0dfd880 | ce5fe98c4200946330ae65f1b1df7ebcbec5c864 | refs/heads/master | 2022-12-10T17:57:27.488518 | 2021-07-15T19:52:49 | 2021-07-15T19:52:49 | 235,610,147 | 0 | 0 | null | 2022-12-08T07:46:26 | 2020-01-22T16:04:54 | Python | UTF-8 | Python | false | false | 793 | py | # Generated by Django 3.0.2 on 2020-02-06 04:40
from django.db import migrations, models
import tinymce.models
class Migration(migrations.Migration):
dependencies = [
('hazard_feed', '0006_weatherrecipients_uuid'),
]
operations = [
migrations.CreateModel(
name='EmailTemplates',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(editable=False, max_length=64, unique=True)),
('template', tinymce.models.HTMLField(null=True)),
],
options={
'verbose_name': 'Email Template',
'verbose_name_plural': 'Email Templates',
},
),
]
| [
"hitnik@gmail.com"
] | hitnik@gmail.com |
2411b3f6bbfe5e5ffedcfd7bed87219288f96d15 | ce9933202516a92da5b6d05137e260baa2f70e9b | /kkcabinet/__init__.py | f06366173f540423958eb0b37b3f06a16ae51076 | [] | no_license | sushenzhang/KKCabinet_project | a067bb1e4b481cbf73861ae76cefecadac859dab | 1b9064102ad8b0cda959a2a05bb0120ced2bffdb | refs/heads/master | 2023-01-09T06:30:12.950246 | 2020-04-04T14:43:49 | 2020-04-04T14:43:49 | 249,875,843 | 0 | 0 | null | 2022-12-27T15:37:04 | 2020-03-25T03:14:41 | HTML | UTF-8 | Python | false | false | 31 | py | """
Package for kkcabinet.
"""
| [
"zhangsushen0@gmail.com"
] | zhangsushen0@gmail.com |
b1ce683f3b89b48ea7afa28f56b7fc8eaee1e74e | 502cfb137f5b04f70a29eb28d787582f7fc2b062 | /test.py | 44eff914d4efb3ccbc360a09e5e3bdbac81262d4 | [] | no_license | Uncle-Yuanl/YOLOv3-tensorflow2.0 | 1c5ca2981e41f11aa9ed227760b1d10c8adadf91 | 41d52d449cc8113b0a781b6bc497a15c35eff502 | refs/heads/master | 2022-09-15T17:12:06.046537 | 2020-05-27T01:20:23 | 2020-05-27T01:20:23 | 258,966,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,474 | py | # -*- coding: utf-8 -*-
"""
Created on Wed May 13 12:56:24 2020
@author: Administrator
"""
from core.yolov3 import YOLOv3, decode
import core.utils as utils
from core.config import cfg
import tensorflow as tf
import cv2
import numpy as np
import shutil
import os
# 确定参数
INPUT_SIZE = 416
CLASSES = utils.read_class_names(cfg.YOLO.CLASSES)
NUM_CLASS = len(CLASSES)
predicted_dir_path = '../mAP/predicted'
ground_truth_dir_path = '../mAP/ground_truth'
if os.path.exists(predicted_dir_path): shutil.rmtree(predicted_dir_path)
if os.path.exists(ground_truth_dir_path): shutil.rmtree(ground_truth_dir_path)
if os.path.exists(cfg.TEST.DETECTED_IMAGE_PATH): shutil.rmtree(cfg.TEST.DETECTED_IMAGE_PATH)
os.mkdir(predicted_dir_path)
os.mkdir(ground_truth_dir_path)
os.mkdir(cfg.TEST.DETECTED_IMAGE_PATH)
# build model
input_layer = tf.keras.Input(shape = [INPUT_SIZE, INPUT_SIZE, 3])
feature_maps = YOLOv3(input_layer)
output_layer = []
for i, fm in enumerate(feature_maps):
output_tensor = decode(fm, i)
output_layer.append(output_tensor)
model = tf.Model(inputs = input_layer, outputs = output_layer)
model.load_weights('./yolov3')
# 打开test数据文件,边测边写
with open(cfg.TEST.ANNOT_PATH, 'r') as annotation_file:
# annotation = annotation_file.readlines()
for num, line in enumerate(annotation_file):
annotation = line.strip().split()
image_path = annotation[0]
image_name = annotation.split('/')[-1]
image = cv2.imread(image_path)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# [ for i ] for前是对i的全部操作,[]的结果就是操作玩的所有结果
bbox_data_gt = np.array([list(map(int, box.split(','))) for box in annotation[1:]])
# 注意没有gt_bbox的情况
# if bbox_data_gt is None:
if len(bbox_data_gt) == 0:
bboxes_gt = []
classes_gt = []
else:
bboxes_gt, classes_gt = bbox_data_gt[:, :4], bbox_data_gt[:, 4]
ground_truth_path = os.path.join(ground_truth_dir_path, str(num) + '.txt')
print('==> ground truth of %s:' % image_name)
num_bbox_gt = len(bboxes_gt)
# 将gt_bbox信息写入文件
with open(ground_truth_path, 'w') as f:
for i in range(bboxes_gt):
class_name = CLASSES[classes_gt[i]]
xmin, ymin, xmax, ymax = list(map(str, bboxes_gt[i])) # 写文件 --> str
gt_bbox_mess = ' '.join([class_name, xmin, ymin, xmax, ymax]) + '\n'
f.write(gt_bbox_mess)
print('\t' + str(gt_bbox_mess).strip())
# predict process
print('predict result of %s:' % image_name)
predicted_result_path = os.path.join(predicted_dir_path, str(num) + '.txt')
image_size = image.shape[:2]
image_data = utils.image_preprocess(np.copy(image), INPUT_SIZE) # np.copy()
image_data = image[np.newaxis, :, :]
pred_bbox = model.predict(image_data)
# 3 * 3 --> 3,
pred_bbox = [tf.reshape(-1, (tf.shape(x)[-1])) for x in pred_bbox]
pred_bbox = tf.concat(pred_bbox, axis = 0)
bboxes = utils.postprocess_boxes(pred_bbox, image_size, INPUT_SIZE, cfg.TEST.SCORE_THRESHOLD)
bboxes = utils.nms(bboxes, cfg.TEST.IOU_THRESHOLD, method='nms')
# 图片写道路径
if cfg.TEST.DETECTED_IMAGE_PATH is not None:
image = utils.draw_bboxes(image, bboxes)
cv2.imwrite(cfg.TEST.DETECTED_IMAGE_PATH + image_name, image)
# 写入预测结果,带score
with open(predicted_result_path, 'w') as f:
for bbox in bboxes:
# coor = bbox[:4]
coor = np.array(bbox[:4], dtype = np.int32)
score = bbox[4]
class_ind = int(bbox[5])
class_name = CLASSES[class_ind]
# int,float --> str
score = '%.4f' % score
xmin, ymin, xmax, ymax = list(map(str, coor))
pred_bbox_mess = ' '.join([class_name, score, xmin, ymin, xmax, ymax]) + '\n'
f.write(pred_bbox_mess)
print('\t' + str(pred_bbox_mess).strip())
| [
"noreply@github.com"
] | noreply@github.com |
91d0813852bf8b67c200a9d2a5cb21238342eedc | ed314b839f2519d982b0091ea3b2b3cc8d4243b1 | /成绩判断动动手1.py | 332d1821290e68930e0db42fb5fe2bb28342e790 | [] | no_license | RayYiHang/pycode | 2a18bfa82e4453cf8a7b3891965f9813c2e66aaf | bb397e64b93cb189f6fc5d040fe8dfd1693ea548 | refs/heads/master | 2021-02-15T02:43:10.587981 | 2020-03-04T12:20:50 | 2020-03-04T12:20:50 | 244,857,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | score = int(input("请输入分数"))
if 80 > score >= 70:
print("C")
elif 90 > score >= 80:
print("B")
elif 100 >= score >= 90:
print("A")
elif 70 > score >= 60:
print("C")
elif 0 < score < 60:
print("D")
else:
print("输入错误") | [
"leiyihang1234@163.com"
] | leiyihang1234@163.com |
08d5d500388fe07a98eca25fb7e0cc15ff850948 | c9499ae048d31f9e37b143500728ca55d8a3e6e3 | /MainPackages/RunDSS.py | 5acbcadcea9370cdb1158d70179c69eb13c1866d | [] | no_license | fprystupczuk/HPET_PowerFlow_Model | c7eadf6e3a3942736ae3caff8ccc62f45429d9a6 | 671f5a3085094bf135753c9cda1edb511dc80f65 | refs/heads/main | 2023-04-26T21:14:30.700547 | 2021-05-25T11:15:49 | 2021-05-25T11:15:49 | 370,665,533 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,402 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import random
from MainPackages.SolvePET import SolvePET
from MainPackages.SolveHPET import SolveHPET
from MainPackages.Extract_DSSmonitors_data import Extract_DSSmonitors_data
def RunDSS(os,np,pd,math,Main_path,model,Network,Feeders,Vcontrol,PV_penetration,ElectricVars,Profiles_path,Main_Results_path,timeArray,DSSObj,Lines_set,Loads_LV,Load_bus,Load_phase,Bus_Vnom,MV_loads,TransformerRating,PowerRatingPET,FractionPowerHPET):
DSSText = DSSObj.Text # to excecute OpenDSS text commands
DSSCircuit = DSSObj.ActiveCircuit # Use it to access the elements in the circuit (e.g., capacitors, buses, etc.)
DSSElement = DSSCircuit.ActiveCktElement
DSSSolution = DSSCircuit.Solution
DSSMonitors = DSSCircuit.Monitors
""" DEMAND AND PV ALLOCATION PROFILES
As written, the script randomly allocates demand and PV active power profiles
to the network using the profiles provided in NetworkData/Profiles """
# MV Demand
MV_Dem_data = {} # defined as dictionary
for i in range(3):
i_MV = 'MV_' + str(i+1)
csv_data = pd.read_csv(Profiles_path + '/Load_profiles/Profile_MV_PZIP_QZIP.csv',index_col=0)
MV_Dem_data[i_MV,'P_Profile'] = csv_data['P_' + str(i+1)] # Active power profile [W]
MV_Dem_data[i_MV,'Q_Profile'] = csv_data['Q_' + str(i+1)] # Reactive power profile [var]
MV_Dem_data[i_MV,'ZIP_P'] = csv_data[['P_Z','P_I','P_P']]
MV_Dem_data[i_MV,'ZIP_Q'] = csv_data[['Q_Z','Q_I','Q_P']]
MV_Dem_data[i_MV,'ZIP_P'].columns = ['Z','I','P']
MV_Dem_data[i_MV,'ZIP_Q'].columns = ['Z','I','P']
# LV Demand
np.random.seed(0) # for repeatability
House_Dem_data = {} # defined as dictionary
h_profiles = np.random.randint(1,100,size=Loads_LV.size) # define which random profiles will be allocated to each customer
for i_house in range(Loads_LV.size):
csv_data = pd.read_csv(Profiles_path + '/Load_profiles/Profile_' + str(h_profiles[i_house]) + '_PZIP_QZIP.csv',index_col=0)
House_Dem_data[Loads_LV[i_house],'P_Profile'] = csv_data['P'] # Active power profile [W]
House_Dem_data[Loads_LV[i_house],'Q_Profile'] = csv_data['Q'] # Reactive power profile [var] - here, assumed a PF of
House_Dem_data[Loads_LV[i_house],'ZIP_P'] = csv_data[['P_Z','P_I','P_P']]
House_Dem_data[Loads_LV[i_house],'ZIP_Q'] = csv_data[['Q_Z','Q_I','Q_P']]
House_Dem_data[Loads_LV[i_house],'ZIP_P'].columns = ['Z','I','P']
House_Dem_data[Loads_LV[i_house],'ZIP_Q'].columns = ['Z','I','P']
# PV generation
np.random.seed(1) # for repeatability
random.seed(1)
PV_Gen_data = {} # defined as dictionary
PV_set = np.array(random.sample(list(Loads_LV), int(np.rint(len(Loads_LV) * PV_penetration)))) # Random PV alocation
PV_profiles = np.random.randint(1,100,size=PV_set.size) # Random selection of PV profiles from the pool
PV_profiles_data = pd.read_excel(Profiles_path + '/PV_profiles/Summer_PV_Profiles.xlsx',header=0,index_col=0) # Read selected profiles (data in kW)
PV_rating_data = pd.read_csv(Profiles_path + '/PV_profiles/PV_rating.csv')
for i_PV in range(PV_set.size):
PV_Gen_data[PV_set[i_PV],'Profile'] = PV_profiles_data[PV_profiles[i_PV]]*1000 # Active power output for this PV [W]
PV_Gen_data[PV_set[i_PV],'Rating'] = PV_rating_data.loc[PV_profiles[i_PV],'Rating [kW]'] # PV rating in kW
# Create PVs in OpenDSS
for PV in PV_set:
DSSText.Command = 'New Load.PV_' + PV + ' Phases=1 Bus1=' + Load_bus.loc[PV,'Bus'] + '.' + str(Load_phase.loc[PV,'phase']) + ' kV=' + str(0.4/math.sqrt(3)) + ' kW=0 Kvar=0 Model=1 Vminpu=0.7 Vmaxpu=1.3'
"""
RUN POWER FLOW AND OBTAIN RESULTS
"""
# OUTPUT DATA CONTAINERS
Data = np.zeros((timeArray.size,ElectricVars.size,Lines_set.size,3))
## Order of ElectricVars in Data array ##
# 0 <- Vmag_send
# 1 <- Vang_send
# 2 <- Vmag_rec
# 3 <- Vang_rec
# 4 <- Imag
# 5 <- P_rec
# 6 <- Q_rec
# 7 <- P_send
# 8 <- Q_send
# Name of variables to be collected for not per line but only for each time-step
P_LV = np.zeros(timeArray.size) # P delivered by the transformer's secondary side (sum of phases)
Q_LV = np.zeros(timeArray.size) # Q delivered by the transformer's secondary side (sum of phases)
TotalLosses = np.zeros((timeArray.size,2)) # Line losses + Transformer losses
LFTlosses = np.zeros(timeArray.size) # Line losses + Transformer losses
PETlosses = np.zeros(timeArray.size) # Losses of the Power Electronic Module
PETefficiency = np.zeros(timeArray.size) # Efficiency of the Power Electronic Module
PET_Spu = np.zeros(timeArray.size) # Apparent power of the Power Electronic Module in pu
PET_PF_LV = np.zeros(timeArray.size) # Power Factor at the transformer's secondary side
PEM_Pc1_pu = np.zeros((timeArray.size,3)) # Active power of Module 1 of the HPET in pu
PEM_Qc1_pu = np.zeros((timeArray.size,3)) # Reactive power of Module 1 of the HPET in pu
PEM_Pc2_pu = np.zeros((timeArray.size,3)) # Active power of Module 2 of the HPET in pu
PEM_Qc2_pu = np.zeros((timeArray.size,3)) # Reactive power of Module 2 of the HPET in pu
HPETlosses = np.zeros((timeArray.size,2)) # HPET losses: PEM losses,Transformer losses
PEMefficiency1 = np.zeros(timeArray.size) # Efficiency of Module 1 of the HPET
PEMefficiency2 = np.zeros(timeArray.size) # Efficiency of Module 2 of the HPET
PEM_Sc1_pu = np.zeros(timeArray.size) # Apparent power of Module 1 of the HPET in pu
PEM_Sc2_pu = np.zeros(timeArray.size) # Apparent power of Module 2 of the HPET in pu
PEM_PFc1 = np.zeros(timeArray.size) # Power Factor at Module 1 of the HPET
PEM_PFc2 = np.zeros(timeArray.size) # Power Factor at Module 2 of the HPET
# Path to store results
fileName_start = Main_Results_path + '/DataResults/' + Network + '/'
try: os.makedirs(fileName_start)
except FileExistsError: pass
# Run time series of Power Flow analysis
for t in range(timeArray.size):
# OpenDSS MV loads
if MV_loads == 'y':
for i in range(3):
i_MV = 'MV_' + str(i+1)
DSSCircuit.Loads.Name = i_MV
DSSCircuit.Loads.kW = MV_Dem_data[i_MV,'P_Profile'].loc[timeArray[t]]/1000.0
DSSCircuit.Loads.kvar = MV_Dem_data[i_MV,'Q_Profile'].loc[timeArray[t]]/1000.0
Z_p = float(MV_Dem_data[i_MV,'ZIP_P'].loc[timeArray[t],'Z'])
I_p = float(MV_Dem_data[i_MV,'ZIP_P'].loc[timeArray[t],'I'])
Z_q = float(MV_Dem_data[i_MV,'ZIP_Q'].loc[timeArray[t],'Z'])
I_q = float(MV_Dem_data[i_MV,'ZIP_Q'].loc[timeArray[t],'I'])
DSSCircuit.Loads.ZIPV = (Z_p,I_p,1-Z_p-I_p,Z_q,I_q,1-Z_q-I_q,0.8) # Last coefficient: voltage in pu from wich the load model changes to constant impedance to facilitate convergency of OpenDSS
# OpenDSS LV feeder house demand
for i_house in Loads_LV:
DSSCircuit.Loads.Name = i_house
DSSCircuit.Loads.kW = House_Dem_data[i_house,'P_Profile'].loc[timeArray[t]]/1000.0
DSSCircuit.Loads.kvar = House_Dem_data[i_house,'Q_Profile'].loc[timeArray[t]]/1000.0
Z_p = float(House_Dem_data[i_house,'ZIP_P'].loc[timeArray[t],'Z'])
I_p = float(House_Dem_data[i_house,'ZIP_P'].loc[timeArray[t],'I'])
Z_q = float(House_Dem_data[i_house,'ZIP_Q'].loc[timeArray[t],'Z'])
I_q = float(House_Dem_data[i_house,'ZIP_Q'].loc[timeArray[t],'I'])
DSSCircuit.Loads.ZIPV = (Z_p,I_p,1-Z_p-I_p,Z_q,I_q,1-Z_q-I_q,0.8) # Last coefficient: voltage in pu from wich the load model changes to constant impedance to facilitate convergency of OpenDSS
# OpenDSS PVs
for PV in PV_set:
DSSCircuit.Loads.Name = 'PV_' + PV
DSSCircuit.Loads.kW = -1 * PV_Gen_data[PV,'Profile'].loc[timeArray[t]]/1000.0
DSSCircuit.Loads.kvar = -1 * 0.0 # Assumes PF=1 for all the PVs
# Solve time step for different transformer models
if model == 'LFT':
# SOLVE
DSSText.Command ='Reset Monitors'
DSSSolution.Solve()
if not(DSSSolution.Converged):
raise ValueError('Solution did not Converge')
DSSMonitors.SampleAll()
DSSMonitors.SaveAll()
DSSMonitors.Name = 'LFT-PQ'
for phase in range(3):
P_LV[t] = P_LV[t] - DSSMonitors.Channel(phase * 2 + 1)[0]
Q_LV[t] = Q_LV[t] + abs(DSSMonitors.Channel(phase * 2 + 2)[0])
# Compute transformer losses and total losses
DSSCircuit.Transformers.Name = 'TR1'
LFTlosses[t] = DSSElement.Losses[0]/1000 # DSSElement.Losses returns Watts and VAr
TotalLosses[t,:] = np.asarray(DSSCircuit.Losses)/1000 # DSSCircuit.Losses returns Watts and VAr
if model == 'PET':
# Regulate output voltage, compute losses and update active power in the primary side
[Pp,Qp,P_LV[t],Q_LV[t],PETlosses[t],PETefficiency[t],PET_Spu[t],PET_PF_LV[t]] = SolvePET(Main_path,np,Vcontrol,DSSObj,Bus_Vnom,Loads_LV,Load_phase,TransformerRating)
# Compute total losses and add power electronics losses
TotalLosses[t,:] = np.asarray(DSSCircuit.Losses)/1000 # DSSCircuit.Losses returns Watts and VAr
TotalLosses[t,0] = TotalLosses[t,0] + PETlosses[t]
if model == 'HPET':
# compute losses and update active power in the primary side
[PEM_Pc1_pu[t,:],PEM_Qc1_pu[t,:],PEM_Pc2_pu[t,:],PEM_Qc2_pu[t,:],HPETlosses[t,0],PEMefficiency1[t],PEMefficiency2[t],PEM_Sc1_pu[t],PEM_Sc2_pu[t],PEM_PFc1[t],PEM_PFc2[t]] = SolveHPET(Main_path,t,np,math,Vcontrol,DSSObj,Loads_LV,Load_phase,TransformerRating,FractionPowerHPET)
# Obtain losses at the LFT of the HPET
for phase in range(3):
DSSCircuit.Transformers.Name = 'Trafo' + str(phase+1)
HPETlosses[t,1] = HPETlosses[t,1] + DSSElement.Losses[0]/1000 # HPET losses: PEM losses,Transformer losses
# Compute total losses and add power electronics losses
TotalLosses[t,:] = np.asarray(DSSCircuit.Losses)/1000 # DSSCircuit.Losses returns Watts and VAr
TotalLosses[t,0] = TotalLosses[t,0] + HPETlosses[t,0] # HPET losses: PEM losses,Transformer losses
# Obtains the total power delivered by the secondary winding of the transformer
DSSMonitors.Name = 'Trafo_sec-PQ'
for phase in range(3):
P_LV[t] = P_LV[t] + DSSMonitors.Channel(phase * 2 + 1)[0]
Q_LV[t] = Q_LV[t] + abs(DSSMonitors.Channel(phase * 2 + 2)[0])
# Store main results into a numpy array
Data[t,:,:,:] = Extract_DSSmonitors_data(np,ElectricVars,DSSMonitors,Lines_set)
# Define final paths to store results for each transformer model
if model == 'LFT':
fileName_end = model + '_PV' + str(PV_penetration) + '_' + '-'.join(Feeders)
np.save(fileName_start + 'LFTlosses_' + fileName_end,LFTlosses)
if model == 'PET':
fileName_end = model + str(PowerRatingPET) + '_PV' + str(PV_penetration) + '_' + str(Vcontrol[0]) + str(Vcontrol[1][0]) + '_' + str(Vcontrol[1][1]) + '_' + '-'.join(Feeders)
np.save(fileName_start + 'PETlosses_' + fileName_end,PETlosses)
np.save(fileName_start + 'PETefficiency_' + fileName_end,PETefficiency)
np.save(fileName_start + 'PET_Spu_' + fileName_end,PET_Spu)
np.save(fileName_start + 'PET_PF_LV_' + fileName_end,PET_PF_LV)
if model == 'HPET':
fileName_end = model + str(FractionPowerHPET) + '_PV' + str(PV_penetration) + '_' + str(Vcontrol[0]) + str(Vcontrol[1][0]) + '_' + str(Vcontrol[1][1]) + '_' + '-'.join(Feeders)
np.save(fileName_start + 'HPETlosses_' + fileName_end,HPETlosses) # HPET losses: PEM losses,Transformer losses
np.save(fileName_start + 'PEM_Pc1_pu_' + fileName_end,PEM_Pc1_pu)
np.save(fileName_start + 'PEM_Qc1_pu_' + fileName_end,PEM_Qc1_pu)
np.save(fileName_start + 'PEM_Pc2_pu_' + fileName_end,PEM_Pc2_pu)
np.save(fileName_start + 'PEM_Qc2_pu_' + fileName_end,PEM_Qc2_pu)
np.save(fileName_start + 'PEMefficiency1_' + fileName_end,PEMefficiency1)
np.save(fileName_start + 'PEMefficiency2_' + fileName_end,PEMefficiency2)
np.save(fileName_start + 'PEM_Sc1_pu_' + fileName_end,PEM_Sc1_pu)
np.save(fileName_start + 'PEM_Sc2_pu_' + fileName_end,PEM_Sc2_pu)
np.save(fileName_start + 'PEM_PFc1_' + fileName_end,PEM_PFc1)
np.save(fileName_start + 'PEM_PFc2_' + fileName_end,PEM_PFc2)
# Save final results into the destination npy files
if MV_loads == 'y': fileName_end = fileName_end + '_MVloads'
np.save(fileName_start + 'Data_' + fileName_end,Data)
np.save(fileName_start + 'P_LV_' + fileName_end,P_LV)
np.save(fileName_start + 'Q_LV_' + fileName_end,Q_LV)
np.save(fileName_start + 'TotalLosses_' + fileName_end,TotalLosses)
| [
"nrvfederico@gmail.com"
] | nrvfederico@gmail.com |
5769534b17fe63983051a371daade258486785d2 | 585ce7b85c0db2f36ff3da3a1a12badcbe61e59d | /utils/.svn/text-base/PathHelper.py.svn-base | a35af1a1a9fd42c94655add1fdc935326389b788 | [] | no_license | cash2one/TTEngine | 41c9fb2ecad8b03c1b8002f6c9abf9978049e933 | 1f2525902417cfafbb5086fea9d730f0d92e84f2 | refs/heads/master | 2021-01-20T00:02:45.209609 | 2015-06-30T03:20:48 | 2015-06-30T03:20:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | #coding=utf8
__author__ = 'xj'
from utils.OsHelper import isWindows
import os
def getConfigDir(os):
return "server/config/"
def getMonitorDir(os):
return "server/monitor/"
def getLocalDir(os):
return "server/config_local/"
def getLicensePath(os, xtDir):
if isWindows(os):
return "bin/license.data"
else:
return "server/bin/license.data"
def getTempDir():
dir = "../temp/"
if not os.path.exists(dir):
os.makedirs(dir)
return dir
def getTarPath(rawPath):
if rawPath.find(":") != -1:
return rawPath.replace(":", "")
else:
if rawPath.startswith("/"):
return rawPath[1:]
else:
return rawPath
def getRealPathFromTar(rawPath, os):
if isWindows(os):
if len(rawPath) > 2:
if rawPath[1] == "/" :
return rawPath[0] + ":" + rawPath[1:]
else:
return rawPath
else:
return rawPath
else:
if rawPath.startswith("/"):
return rawPath
else:
return "/" + rawPath
| [
"2928807616@qq.com"
] | 2928807616@qq.com | |
11d574873016cebec8bc817967783c8384d642f8 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/opinkerfi-adagios/allPythonContent.py | 2c936d37e8f3eb56e3d4cf91652d8ed5eed39dfd | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462,213 | py | __FILENAME__ = auth
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Authorization related stuff in Adagios
"""
import adagios.status.utils
import adagios.views
# Seed membership lists for each role.  Extra usernames / contactgroup names
# from adagios.settings are merged into these at runtime (see has_role below).
auditors = []
operators = []
administrators = []
# administrator belongs to all the other groups
# NOTE(review): both lists are still empty at import time, so this line adds
# nothing today; members added later via settings are not propagated here.
administrators += operators + auditors

# Ordered list of (module path prefix, required role) rules.  The FIRST prefix
# that matches a view's dotted python path decides which role is required, so
# the order of the appends below matters; the empty-prefix catch-all at the
# bottom must stay last.
access_list = list()
# Explicitly grant configuration access only to admins
access_list.append(('adagios.objectbrowser', "administrators"))
access_list.append(('adagios.okconfig_', "administrators"))
access_list.append(('adagios.misc.helpers', "administrators"))
access_list.append(('adagios.misc.views.settings', "administrators"))
access_list.append(('adagios.misc.views.gitlog', "administrators"))
access_list.append(('adagios.misc.views.service', "administrators"))
access_list.append(('adagios.rest.status.edit', "administrators"))
access_list.append(('adagios.status.views.contact', "administrators"))
access_list.append(('adagios.status.views.state_history', "administrators"))
access_list.append(('adagios.status.views.log', "administrators"))
access_list.append(('adagios.status.views.servicegroup', "administrators"))
access_list.append(('adagios.rest.status.state_history', "administrators"))
access_list.append(('adagios.rest.status.top_alert_producers', "administrators"))
access_list.append(('adagios.rest.status.update_check_command', "administrators"))
access_list.append(('adagios.rest.status.log_entries', "administrators"))

# Access to rest interface
access_list.append(('adagios.rest.views', "everyone"))
access_list.append(('adagios.rest.status', "everyone"))
access_list.append(('adagios.misc.rest', "everyone"))

# These modules should more or less be considered "safe"
access_list.append(('django.views.static', "everyone"))
access_list.append(('django.views.i18n', "everyone"))
access_list.append(('adagios.views', "everyone"))
access_list.append(('adagios.status', "everyone"))
access_list.append(('adagios.pnp', "everyone"))
access_list.append(('adagios.contrib', "everyone"))
access_list.append(('adagios.bi.views.index', "everyone"))
access_list.append(('adagios.bi.views.view', "everyone"))
access_list.append(('adagios.bi.views.json', "everyone"))
access_list.append(('adagios.bi.views.graphs_json', "everyone"))
access_list.append(('adagios.misc.helpers.needs_reload', "everyone"))

# If no other rule matches, assume administrators have access
access_list.append(('', "administrators"))
def check_access_to_path(request, path):
    """ Verify that the user behind ``request`` may access ``path``.

    ``path`` is a dotted python name, e.g. "adagios.objectbrowser.views.index".
    The first entry in ``access_list`` whose prefix matches decides which role
    is required.  Raises AccessDenied when the user lacks that role; returns
    None otherwise.
    """
    for prefix, required_role in access_list:
        if not path.startswith(prefix):
            continue
        if has_role(request, required_role):
            return None
        user = request.META.get('REMOTE_USER', 'anonymous')
        message = "You do not have permission to access %s" % (path, )
        raise adagios.exceptions.AccessDenied(user, access_required=required_role, message=message, path=path)
    return None
def has_access_to_path(request, path):
    """ Return True when the user in ``request`` has access to ``path``.

    Arguments:
        path -- dotted path to a method or module, for example
                "adagios.objectbrowser.views.index"

    The first matching prefix in ``access_list`` determines the required
    role; False is returned when no rule matches at all.
    """
    for prefix, required_role in access_list:
        if path.startswith(prefix):
            return has_role(request, required_role)
    return False
def has_role(request, role):
    """ Returns true if the username in current request has access to a specific role.

    Arguments:
        request -- Django request object; the username is taken from REMOTE_USER
        role    -- name of the role to check, e.g. "administrators" or "everyone"

    Role membership is the union of the module-level list with the same name
    (e.g. ``administrators`` above) and a comma-separated list found in
    adagios.settings.<role>.  A user matches either by username directly or
    by belonging to a contactgroup that is a member of the role.
    """
    user = request.META.get('REMOTE_USER', "anonymous")

    # Allow if everyone is allowed access
    if role == 'everyone':
        return True

    # Deny if nobody is allowed access
    if role == 'nobody':
        return False

    # Allow if role is "contacts" and user is in fact a valid contact
    if role == 'contacts' and adagios.status.utils.get_contacts(None, name=user):
        return True

    # Allow if role is "users" and we are in fact logged in
    if role == 'users' and user != "anonymous":
        return True

    # Look up the module-level membership list for this role name.
    users_and_groups = globals().get(role, None)

    # Merge in any extra members configured in adagios.settings.<role>.
    # Bugfix: only do this when the role actually exists as a module-level
    # list.  Previously an unknown role name that happened to be a settings
    # attribute crashed with AttributeError (None has no attribute 'append')
    # instead of simply denying access.
    if users_and_groups is not None and hasattr(adagios.settings, role):
        for i in str(getattr(adagios.settings, role)).split(','):
            i = i.strip()
            if i not in users_and_groups:
                users_and_groups.append(i)

    # Deny if no role exists with this name
    if not users_and_groups:
        return False

    # Allow if user is mentioned in your role
    if user in users_and_groups:
        return True

    # If it is specifically stated that "everyone" belongs to the group
    if "everyone" in users_and_groups:
        return True

    # Check if user belongs to any contactgroup that has access.
    # NOTE(review): the username is interpolated directly into the livestatus
    # filter; presumably REMOTE_USER is sanitized by the web server, but
    # verify it cannot contain newlines.
    contactgroups = adagios.status.utils.get_contactgroups(None, 'Columns: name', 'Filter: members >= %s' % user)

    # Allow if we find user belongs to one contactgroup that has this role
    for contactgroup in contactgroups:
        if contactgroup['name'] in users_and_groups:
            return True

    # If we get here, the user clearly did not have access
    return False
def check_role(request, role):
    """ Raise AccessDenied unless the user in ``request`` has ``role``. """
    if has_role(request, role):
        return
    user = request.META.get('REMOTE_USER', 'anonymous')
    message = "User does not have the required role"
    raise adagios.exceptions.AccessDenied(username=user, access_required=role, message=message)
class AuthorizationMiddleWare(object):
""" Django MiddleWare class. It's responsibility is to check if an adagios user has access
if user does not have access to a given view, it is given a 403 error.
"""
def process_request(self, request):
return None
def process_view(self, request, view_func, view_args, view_kwargs):
if not adagios.settings.enable_authorization:
return None
function_name = view_func.__name__
module_name = view_func.__module__
if module_name == "adagios.rest.views" and function_name == 'handle_request':
module_name = view_kwargs['module_path']
function_name = view_kwargs['attribute']
try:
path = module_name + '.' + function_name
check_access_to_path(request, path)
except adagios.exceptions.AccessDenied, e:
return adagios.views.http_403(request, exception=e)
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.utils.translation import ugettext as _
import adagios.status.utils
import adagios.bi
class RemoveSubProcessForm(forms.Form):
    """ Remove one specific sub process from a business process
    """
    process_name = forms.CharField(max_length=100, required=True)
    process_type = forms.CharField(max_length=100, required=True)

    def __init__(self, instance, *args, **kwargs):
        # instance is the BusinessProcess the removal applies to.
        self.bp = instance
        super(RemoveSubProcessForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Remove the selected subprocess and persist the change. """
        data = self.cleaned_data
        self.bp.remove_process(data.get('process_name'),
                               data.get('process_type'))
        self.bp.save()
# (value, label) pairs for the status_method ChoiceField below.
# A list comprehension (rather than map()) guarantees a re-iterable list,
# so the choices survive repeated form rendering on Python 3 as well.
status_method_choices = [
    (x, x) for x in adagios.bi.BusinessProcess.status_calculation_methods]
class BusinessProcessForm(forms.Form):
    """ Use this form to edit a BusinessProcess """
    name = forms.CharField(max_length=100, required=True,
                           help_text=_("Unique name for this business process."))
    #processes = forms.CharField(max_length=100, required=False)
    display_name = forms.CharField(max_length=100, required=False,
                                   help_text=_("This is the name that will be displayed to users on this process. Usually it is the name of the system this business group represents."))
    notes = forms.CharField(max_length=1000, required=False,
                            help_text=_("Here you can put in any description of the business process you are adding. Its a good idea to write down what the business process is about and who to contact in case of downtimes."))
    status_method = forms.ChoiceField(
        choices=status_method_choices, help_text=_("Here you can choose which method is used to calculate the global status of this business process"))
    # state_0..state_3 are the human-friendly labels shown for each
    # nagios-style exit status of the process.
    state_0 = forms.CharField(max_length=100, required=False,
                              help_text=_("Human friendly text for this respective state. You can type whatever you want but nagios style exit codes indicate that 0 should be 'ok'"))
    state_1 = forms.CharField(max_length=100, required=False,
                              help_text=_("Typically used to represent warning or performance problems"))
    state_2 = forms.CharField(max_length=100, required=False,
                              help_text=_("Typically used to represent critical status"))
    state_3 = forms.CharField(
        max_length=100, required=False, help_text=_("Use this when status is unknown"))
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)

    def __init__(self, instance, *args, **kwargs):
        # instance: the BusinessProcess being edited. This is not a Django
        # ModelForm, so the instance is carried manually on the form.
        self.bp = instance
        super(BusinessProcessForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Merge all cleaned fields into the process' data dict and persist. """
        c = self.cleaned_data
        self.bp.data.update(c)
        self.bp.save()

    def remove(self):
        """ Remove the subprocess named in the raw form data and persist. """
        # NOTE(review): uses raw self.data (not cleaned_data), so this works
        # even when full form validation was not run.
        c = self.data
        process_name = c.get('process_name')
        process_type = c.get('process_type')
        if process_type == 'None':
            # HTML forms submit the string 'None' for a missing type.
            process_type = None
        self.bp.remove_process(process_name, process_type)
        self.bp.save()

    def clean(self):
        cleaned_data = super(BusinessProcessForm, self).clean()
        # If name has changed, look if there is another business process with
        # same name.
        new_name = cleaned_data.get('name')
        if new_name and new_name != self.bp.name:
            if new_name in adagios.bi.get_all_process_names():
                raise forms.ValidationError(
                    _("Cannot rename process to %s. Another process with that name already exists") % new_name
                )
        return cleaned_data

    def delete(self):
        """ Delete this business process """
        self.bp.delete()

    def add_process(self):
        """ Add any subprocess named in the raw form data and persist. """
        # Each optional field present in the raw data is added as a
        # subprocess; the process_type argument is always None here.
        process_name = self.data.get('process_name')
        hostgroup_name = self.data.get('hostgroup_name')
        servicegroup_name = self.data.get('servicegroup_name')
        service_name = self.data.get('service_name')
        if process_name:
            self.bp.add_process(process_name, None)
        if hostgroup_name:
            self.bp.add_process(hostgroup_name, None)
        if servicegroup_name:
            self.bp.add_process(servicegroup_name, None)
        if service_name:
            self.bp.add_process(service_name, None)
        self.bp.save()
# Valid types for a subprocess of a business process.
choices = 'businessprocess', 'hostgroup', 'servicegroup', 'service', 'host'
# (value, label) pairs for the process_type ChoiceField; built as a list
# (not a map object) so the choices can be iterated more than once on
# Python 3 as well.
process_type_choices = [(x, x) for x in choices]
class AddSubProcess(forms.Form):
    """ Add a single subprocess to an existing business process. """
    # Type of the subprocess being added; see process_type_choices above.
    process_type = forms.ChoiceField(choices=process_type_choices)
    # Filled in by the frontend (hidden field), hence the inline style.
    process_name = forms.CharField(
        widget=forms.HiddenInput(attrs={'style': "width: 300px;"}), max_length=100)
    display_name = forms.CharField(max_length=100, required=False)
    # Tags influence status calculation (e.g. "not critical").
    tags = forms.CharField(
        max_length=100, required=False, initial="not critical")

    def __init__(self, instance, *args, **kwargs):
        # instance: the BusinessProcess the new subprocess is attached to.
        self.bp = instance
        super(AddSubProcess, self).__init__(*args, **kwargs)

    def save(self):
        """ Forward all validated fields to add_process() and persist. """
        self.bp.add_process(**self.cleaned_data)
        self.bp.save()
class AddHostgroupForm(forms.Form):
    """ Empty form with no fields defined. """
    # NOTE(review): no fields and no usage visible in this module —
    # possibly a placeholder; confirm against callers before removing.
    pass
class AddGraphForm(forms.Form):
    """ Attach a pnp performance graph to a business process. """
    # host/service/metric triplet identifying one pnp datasource.
    host_name = forms.CharField(max_length=100,)
    service_description = forms.CharField(max_length=100, required=False)
    metric_name = forms.CharField(max_length=100, required=True)
    notes = forms.CharField(max_length=100, required=False,
                            help_text=_("Put here a friendly description of the graph"))

    def __init__(self, instance, *args, **kwargs):
        # instance: the BusinessProcess that will receive the graph.
        self.bp = instance
        super(AddGraphForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Add the graph described by cleaned_data and persist. """
        self.bp.add_pnp_graph(**self.cleaned_data)
        self.bp.save()
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = tests
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
import tempfile
import os
import time
from django.test import TestCase
from django.test.client import Client
from django.utils.translation import ugettext as _
from adagios.bi import *
import adagios.utils
class TestBusinessProcess(TestCase):
    """ Unit tests for the BusinessProcess class. """

    def setUp(self):
        # Point persistence at a throwaway file so tests never touch the
        # real business process configuration.
        fd, filename = tempfile.mkstemp()
        BusinessProcess._default_filename = filename

    def tearDown(self):
        os.remove(BusinessProcess._default_filename)

    def test_save_and_load(self):
        """ This test will test load/save of a business process.

        The procedure is as follows:
        * Load a business process
        * Save it
        * Make changes
        * Load it again, and verify changes were saved.
        """
        bp_name = 'test_business_process'
        b = BusinessProcess(bp_name)
        b.load()
        # Append a dot to the display name and save.
        # BUGFIX: the original expression `b.display_name or '' + "."`
        # parsed as `b.display_name or ('' + ".")`, so nothing was appended
        # when a display_name already existed.
        new_display_name = (b.display_name or '') + "."
        b.display_name = new_display_name
        b.save()
        # Load bp again
        b = BusinessProcess(bp_name)
        b.load()
        self.assertEqual(b.display_name, new_display_name)

    def test_add_process(self):
        """ Test adding new processes to a current BP
        """
        bp_name = 'test'
        sub_process_name = 'sub_process'
        sub_process_display_name = 'This is a subprocess of test'
        b = BusinessProcess(bp_name)
        b.add_process(sub_process_name, display_name=sub_process_display_name)
        for i in b.get_processes():
            if i.name == sub_process_name and i.display_name == sub_process_display_name:
                return
        # Loop ended without finding the newly added subprocess.
        self.fail(
            'We tried adding a business process but could not find it afterwards')

    def test_hostgroup_bp(self):
        # Smoke test: adding a hostgroup-typed subprocess must not raise.
        bp_name = 'test'
        hostgroup_name = 'acme-network'
        b = BusinessProcess(bp_name)
        b.add_process(hostgroup_name, 'hostgroup')

    def test_remove_process(self):
        """ Test removing a subprocess from a businessprocess
        """
        bp_name = 'test'
        sub_process_name = 'sub_process'
        sub_process_display_name = 'This is a subprocess of test'
        b = BusinessProcess(bp_name)
        b.add_process(sub_process_name, display_name=sub_process_display_name)
        self.assertNotEqual([], b.processes)
        b.remove_process(sub_process_name)
        self.assertEqual([], b.processes)

    def test_get_all_processes(self):
        # Smoke test: must not raise.
        get_all_processes()

    def test_macros(self):
        bp = get_business_process('uniq test case', status_method="use_worst_state")
        macros_for_empty_process = {
            'num_problems': 0,
            'num_state_0': 0,
            'num_state_1': 0,
            'num_state_2': 0,
            'num_state_3': 0,
            'current_state': 3,
            'friendly_state': 'unknown',
            'percent_problems': 0,
            'percent_state_3': 0,
            'percent_state_2': 0,
            'percent_state_1': 0,
            'percent_state_0': 0
        }
        self.assertEqual(3, bp.get_status())
        self.assertEqual(macros_for_empty_process, bp.resolve_all_macros())

        bp.add_process("always_ok", status_method="always_ok")
        bp.add_process("always_major", status_method="always_major")
        macros_for_nonempty_process = {
            'num_problems': 1,
            'num_state_0': 1,
            'num_state_1': 0,
            'num_state_2': 1,
            'num_state_3': 0,
            'current_state': 2,
            'friendly_state': 'major problems',
            'percent_problems': 50.0,
            'percent_state_3': 0.0,
            'percent_state_2': 50.0,
            'percent_state_1': 0.0,
            'percent_state_0': 50.0
        }
        self.assertEqual(2, bp.get_status())
        self.assertEqual(macros_for_nonempty_process, bp.resolve_all_macros())

    def testPageLoad(self):
        self.loadPage('/bi')
        self.loadPage('/bi/add')
        self.loadPage('/bi/add/subprocess')
        self.loadPage('/bi/add/graph')

    def loadPage(self, url):
        """ Load one specific page, and assert if return code is not 200 """
        try:
            c = Client()
            response = c.get(url)
            self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
        except Exception as e:
            # BUGFIX: the original called self.assertEqual(True, "<message>"),
            # which fails with a nonsensical True-vs-string comparison
            # instead of reporting the message.
            self.fail("Unhandled exception while loading %s: %s" % (url, e))
class TestBusinessProcessLogic(TestCase):

    """ This class responsible for testing business classes logic """

    def setUp(self):
        # Boot a disposable nagios/livestatus environment for each test.
        self.environment = adagios.utils.FakeAdagiosEnvironment()
        self.environment.create_minimal_environment()
        self.environment.configure_livestatus()
        self.environment.update_adagios_global_variables()
        self.environment.start()
        self.livestatus = self.environment.get_livestatus()
        self.livestatus.test()
        # Redirect BusinessProcess persistence to a temp file.
        fd, filename = tempfile.mkstemp()
        BusinessProcess._default_filename = filename

    def tearDown(self):
        self.environment.terminate()
        os.remove(BusinessProcess._default_filename)

    def testBestAndWorstState(self):
        # use_worst_state: overall status is the worst subprocess status;
        # use_best_state: overall status is the best one.
        s = BusinessProcess("example process")
        s.status_method = 'use_worst_state'
        self.assertEqual(3, s.get_status(), _("Empty bi process should have status unknown"))
        s.add_process(process_name="always_ok", process_type="businessprocess", status_method='always_ok')
        self.assertEqual(0, s.get_status(), _("BI process with one ok subitem, should have state OK"))
        s.add_process("fail subprocess", status_method="always_major")
        self.assertEqual(2, s.get_status(), _("BI process with one failed item should have a critical state"))
        s.status_method = 'use_best_state'
        self.assertEqual(0, s.get_status(), _("BI process using use_best_state should be returning OK"))

    def testBusinessRules(self):
        # Default status method: per the assertions below, only subprocesses
        # tagged "mission critical"/"not critical" influence the state;
        # untagged failures are ignored.
        s = BusinessProcess("example process")
        self.assertEqual(3, s.get_status(), _("Empty bi process should have status unknown"))
        s.add_process(process_name="always_ok", process_type="businessprocess", status_method='always_ok')
        self.assertEqual(0, s.get_status(), _("BI process with one ok subitem, should have state OK"))
        s.add_process("untagged process", status_method="always_major")
        self.assertEqual(0, s.get_status(), _("BI subprocess that is untagged should yield an ok state"))
        s.add_process("not critical process", status_method="always_major", tags="not critical")
        self.assertEqual(1, s.get_status(), _("A Non critical subprocess should yield 'minor problem'"))
        s.add_process("critical process", status_method="always_major", tags="mission critical")
        self.assertEqual(2, s.get_status(), _("A critical process in failed state should yield major problem"))
        s.add_process("another noncritical process", status_method="always_major", tags="not critical")
        self.assertEqual(2, s.get_status(), _("Adding another non critical subprocess should still yield a critical state"))
class TestDomainProcess(TestCase):

    """ Test the Domain business process type
    """

    def setUp(self):
        # Boot a disposable nagios/livestatus environment for each test.
        self.environment = adagios.utils.FakeAdagiosEnvironment()
        self.environment.create_minimal_environment()
        self.environment.configure_livestatus()
        self.environment.update_adagios_global_variables()
        self.environment.start()
        self.livestatus = self.environment.get_livestatus()
        self.livestatus.test()

    def tearDown(self):
        self.environment.terminate()

    def testHost(self):
        domain = get_business_process(process_name='ok.is', process_type='domain')
        # We don't exactly know the status of the domain, but lets run it anyway
        # for smoketesting
        domain.get_status()
class TestServiceProcess(TestCase):

    """ Test Service Business process type """

    def setUp(self):
        # Boot a disposable nagios/livestatus environment for each test.
        self.environment = adagios.utils.FakeAdagiosEnvironment()
        self.environment.create_minimal_environment()
        self.environment.configure_livestatus()
        self.environment.update_adagios_global_variables()
        self.environment.start()
        self.livestatus = self.environment.get_livestatus()
        self.livestatus.test()

    def tearDown(self):
        self.environment.terminate()

    def testService(self):
        # 'ok_host/ok service 1' is part of the minimal test environment.
        service = get_business_process('ok_host/ok service 1', process_type='service')
        status = service.get_status()
        self.assertFalse(service.errors)
        self.assertEqual(0, status, "The service should always have status OK")
class TestHostProcess(TestCase):

    """ Test the Host business process type
    """

    def setUp(self):
        # Boot a disposable nagios/livestatus environment for each test.
        self.environment = adagios.utils.FakeAdagiosEnvironment()
        self.environment.create_minimal_environment()
        self.environment.configure_livestatus()
        self.environment.update_adagios_global_variables()
        self.environment.start()
        self.livestatus = self.environment.get_livestatus()
        self.livestatus.test()

    def tearDown(self):
        self.environment.terminate()

    def testNonExistingHost(self):
        host = get_business_process('non-existant host', process_type='host')
        self.assertEqual(3, host.get_status(), _("non existant host processes should have unknown status"))

    def testExistingHost(self):
        #localhost = self.livestatus.get_hosts('Filter: host_name = ok_host')
        host = get_business_process('ok_host', process_type='host')
        self.assertEqual(0, host.get_status(), _("the host ok_host should always has status ok"))

    def testDomainProcess(self):
        domain = get_business_process(process_name='oksad.is', process_type='domain')
        # We don't exactly know the status of the domain, but lets run it anyway
        # for smoketesting
        # BUGFIX: the smoke test never actually invoked get_status().
        domain.get_status()
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
(r'^/?$', 'bi.views.index'),
(r'^/add/?$', 'bi.views.add'),
(r'^/add/subprocess/?$', 'bi.views.add_subprocess'),
(r'^/add/graph/?$', 'bi.views.add_graph'),
(r'^/(?P<process_name>.+)/edit/status_method$', 'bi.views.change_status_calculation_method'),
(r'^/edit/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.edit'),
(r'^/json/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.json'),
(r'^/graphs/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.graphs_json'),
(r'^/delete/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.delete'),
(r'^/view/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.view'),
#(r'^/view/(?P<process_name>.+)/?$', 'bi.views.view'),
)
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import simplejson
from django.http import HttpResponse
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from adagios.pnp.functions import run_pnp
from adagios.views import adagios_decorator
import adagios.bi
import adagios.bi.forms
from adagios.views import adagios_decorator, error_page
@adagios_decorator
def edit(request, process_name, process_type):
    """ Edit one specific business process.

    Dispatches on which submit button is present in the POST data:
    save_process, remove_process, add_process, add_graph_submit_button
    or add_subprocess_submit_button.
    """
    messages = []
    bp = adagios.bi.get_business_process(process_name)
    errors = bp.errors or []
    status = bp.get_status()
    # Pre-build all forms so the template can render them on GET as well.
    add_subprocess_form = adagios.bi.forms.AddSubProcess(instance=bp)
    form = adagios.bi.forms.BusinessProcessForm(instance=bp, initial=bp.data)
    add_graph_form = adagios.bi.forms.AddGraphForm(instance=bp)
    if request.method == 'GET':
        form = adagios.bi.forms.BusinessProcessForm(
            instance=bp, initial=bp.data)
    elif request.method == 'POST':
        if 'save_process' in request.POST:
            form = adagios.bi.forms.BusinessProcessForm(
                instance=bp, data=request.POST)
            if form.is_valid():
                form.save()
        elif 'remove_process' in request.POST:
            removeform = adagios.bi.forms.RemoveSubProcessForm(
                instance=bp, data=request.POST)
            if removeform.is_valid():
                removeform.save()
        elif 'add_process' in request.POST:
            if form.is_valid():
                form.add_process()
        elif 'add_graph_submit_button' in request.POST:
            add_graph_form = adagios.bi.forms.AddGraphForm(
                instance=bp, data=request.POST)
            if add_graph_form.is_valid():
                add_graph_form.save()
        elif 'add_subprocess_submit_button' in request.POST:
            add_subprocess_form = adagios.bi.forms.AddSubProcess(
                instance=bp, data=request.POST)
            if add_subprocess_form.is_valid():
                add_subprocess_form.save()
            else:
                errors.append(_("failed to add subprocess"))
                add_subprocess_failed = True
        else:
            errors.append(
                _("I don't know what submit button was clicked. please file a bug."))
    # Load the process again, since any of the above probably made changes
    # to it.
    bp = adagios.bi.get_business_process(process_name)
    # NOTE: locals() is the template context, so every variable name above
    # is part of the template contract -- do not rename casually.
    return render_to_response('business_process_edit.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def add_graph(request):
    """ Add one or more graphs to a single business process.

    The querystring contains name=<process> plus any number of
    graph=<host>,<service>,<metric> entries. On GET the parsed graphs are
    only previewed; on POST they are saved to the business process.
    """
    c = {}
    c['errors'] = []
    c.update(csrf(request))
    if request.method == 'GET':
        source = request.GET
    else:
        source = request.POST
    name = source.get('name', None)
    if name:
        c['name'] = name
        bp = adagios.bi.get_business_process(name)
    c['graphs'] = []
    # Convert every graph= in the querystring into
    # host_name,service_description,metric attribute
    graphs = source.getlist('graph')
    for graph in graphs:
        tmp = graph.split(',')
        if len(tmp) != 3:
            c['errors'].append(_("Invalid graph string: %s") % (tmp))
            # BUGFIX: skip malformed entries instead of falling through
            # and crashing with an IndexError below.
            continue
        graph_dict = {}
        graph_dict['host_name'] = tmp[0]
        graph_dict['service_description'] = tmp[1]
        graph_dict['metric_name'] = tmp[2]
        graph_dict['notes'] = tmp[2]
        c['graphs'].append(graph_dict)
    #
    # When we get here, we have parsed all the data from the client, if
    # its a post, lets add the graphs to our business process
    if request.method == 'POST':
        if not name:
            raise Exception(
                _("Booh! you need to supply name= to the querystring"))
        for graph in c['graphs']:
            form = adagios.bi.forms.AddGraphForm(instance=bp, data=graph)
            if form.is_valid():
                form.save()
            else:
                # BUGFIX: form.errors is a dict, not an exception -- the
                # old `raise e` produced an unrelated TypeError.
                raise Exception(_("Failed to add graph: %s") % form.errors)
        return redirect('adagios.bi.views.edit', bp.process_type, bp.name)
    return render_to_response('business_process_add_graph.html', c, context_instance=RequestContext(request))
@adagios_decorator
def view(request, process_name, process_type=None):
    """ View one specific business process
    """
    bp = adagios.bi.get_business_process(
        process_name, process_type=process_type)
    # Url the template polls for this process' graph data.
    graphs_url = reverse(
        'adagios.bi.views.graphs_json',
        kwargs={"process_type": process_type, "process_name": process_name})
    context = {
        'messages': [],
        'errors': [],
        'bp': bp,
        'graphs_url': graphs_url,
    }
    return render_to_response('business_process_view.html', context, context_instance=RequestContext(request))
@adagios_decorator
def json(request, process_name=None, process_type=None):
    """ Returns a list of all processes in json format.

    If process_name is specified, return all sub processes.
    """
    if not process_name:
        processes = adagios.bi.get_all_processes()
    else:
        process = adagios.bi.get_business_process(process_name, process_type)
        processes = process.get_processes()
    # Serialize only the fields the frontend needs.
    result = []
    for i in processes:
        result.append({
            'state': i.get_status(),
            'name': i.name,
            'display_name': i.display_name,
        })
    # FIX: local previously named `json`, shadowing this function's own
    # name -- renamed for clarity.
    payload = simplejson.dumps(result)
    return HttpResponse(payload, content_type="application/json")
@adagios_decorator
def graphs_json(request, process_name, process_type):
    """ Get graphs for one specific business process.

    Returns a json list of pnp graph descriptions, each annotated with
    the metric's last value and the graph's notes.
    """
    # FIX: removed an unused context dict and an unused function-local
    # import of adagios.businessprocess.
    bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
    if not bp.graphs:
        return HttpResponse('[]')
    graphs = []
    for graph in bp.graphs or []:
        if graph.get('graph_type') != 'pnp':
            continue
        host_name = graph.get('host_name')
        service_description = graph.get('service_description')
        metric_name = graph.get('metric_name')
        # Ask pnp4nagios for all datasources of this host/service, then
        # pick out the one matching our metric.
        pnp_result = run_pnp('json', host=host_name, srv=service_description)
        json_data = simplejson.loads(pnp_result)
        for i in json_data:
            if i.get('ds_name') == metric_name:
                i['last_value'] = bp.get_pnp_last_value(
                    host_name, service_description, metric_name)
                i['notes'] = graph.get('notes')
                graphs.append(i)
    return HttpResponse(simplejson.dumps(graphs))
@adagios_decorator
def add_subprocess(request):
    """ Add subitems to one specific businessprocess
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    c.update(csrf(request))
    # Split the querystring into subprocesses mentioned and all remaining
    # key/value parameters.
    process_list, parameters = _business_process_parse_querystring(request)
    if request.method == 'POST':
        if 'name' not in request.POST:
            raise Exception(
                _("You must specify which subprocess to add all these objects to"))
        # 'name' identifies the parent process; drop it so it is not passed
        # on as an add_process() parameter.
        parameters.pop('name')
        bp = adagios.bi.get_business_process(request.POST.get('name'))
        # Find all subprocesses in the post, can for each one call add_process
        # with all parmas as well
        for i in process_list:
            process_name = i.get('name')
            process_type = i.get('process_type')
            bp.add_process(process_name, process_type, **parameters)
            c['messages'].append('%s: %s added to %s' %
                                 (process_type, process_name, bp.name))
        bp.save()
        return redirect('adagios.bi.views.edit', bp.process_type, bp.name)
    # GET: render a preview of what would be added.
    c['subprocesses'] = process_list
    c['parameters'] = parameters
    return render_to_response('business_process_add_subprocess.html', c, context_instance=RequestContext(request))
@adagios_decorator
def add(request):
    """ Create a new business process; on success redirect to its edit view. """
    c = {}
    c['messages'] = []
    c['errors'] = []
    import adagios.businessprocess
    bp = adagios.bi.BusinessProcess(_("New Business Process"))
    if request.method == 'GET':
        form = adagios.bi.forms.BusinessProcessForm(
            instance=bp, initial=bp.data)
    elif request.method == 'POST':
        form = adagios.bi.forms.BusinessProcessForm(
            instance=bp, data=request.POST)
        if form.is_valid():
            form.save()
            return redirect('adagios.bi.views.edit', bp.process_type, bp.name)
    # NOTE: locals() is the template context -- names above (form, bp, c)
    # are part of the template contract.
    return render_to_response('business_process_edit.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def index(request):
    """ List all configured business processes
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    processes = adagios.bi.get_all_processes()
    # NOTE: locals() is the template context; the template reads
    # `processes` (and friends) by name.
    return render_to_response('business_process_list.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def delete(request, process_name, process_type):
    """ Delete one specific business process """
    import adagios.businessprocess
    bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
    if request.method == 'POST':
        # The form is only used for its delete() helper; no validation is
        # performed before deletion.
        form = adagios.bi.forms.BusinessProcessForm(
            instance=bp, data=request.POST)
        form.delete()
        return redirect('adagios.bi.views.index')
    # GET: render a confirmation page (template reads locals()).
    return render_to_response('business_process_delete.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def change_status_calculation_method(request, process_name):
    """ Switch which algorithm the given process uses for its global status.

    The POST data carries a key named after the chosen calculation method;
    on GET (or no matching key) nothing is changed.
    """
    import adagios.businessprocess
    bp = adagios.bi.get_business_process(process_name)
    if request.method == 'POST':
        for method in bp.status_calculation_methods:
            if method in request.POST:
                bp.status_method = method
                bp.save()
    return redirect('adagios.bi.views.index')
def _business_process_parse_querystring(request):
    """ Parses querystring into process_list and parameters

    Returns:
        (process_list, parameters) where:
        -- process_list is a list of all business processes that were mentioned in the querystring
        -- parameters is a dict of all other querystring values that were not in process_list and not in the exclude list
    """
    # BUGFIX: must be a tuple -- the previous bare string made the
    # `key in ...` check below a substring test, silently dropping any
    # parameter whose name happened to be a substring of
    # "csrfmiddlewaretoken" (e.g. "token").
    ignored_querystring_parameters = ("csrfmiddlewaretoken",)
    import adagios.businessprocess
    data = {}
    if request.method == 'GET':
        data = request.GET
    elif request.method == 'POST':
        data = request.POST
    else:
        raise Exception(_("Booh, use either get or POST"))
    parameters = {}
    process_list = []
    for key in data:
        for value in data.getlist(key):
            if key in ignored_querystring_parameters:
                continue
            # Keys named after a business process class denote processes;
            # everything else is treated as a plain parameter.
            type_of_process = adagios.bi.get_class(key, None)
            if type_of_process is None:
                parameters[key] = value
            else:
                process_type = type_of_process.process_type
                process = adagios.bi.get_business_process(
                    value, process_type=process_type)
                process_list.append(process)
    return process_list, parameters
########NEW FILE########
__FILENAME__ = businessprocess
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from adagios.bi import *
########NEW FILE########
__FILENAME__ = context_processors
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pynag.Model
import os
import getpass
from adagios import notifications, settings, add_plugin
from adagios.misc.rest import add_notification, clear_notification
import pynag.Model.EventHandlers
import pynag.Parsers
from pynag.Parsers import Livestatus
import adagios
import adagios.status.utils
from pynag import Model
import time
import datetime
from adagios import __version__
from adagios import userdata
from django.utils.translation import ugettext as _
def on_page_load(request):
    """ Collection of actions that take place every page load """
    # Each context processor returns a dict; merge them in order so later
    # processors can override earlier keys -- the same semantics as the
    # original copy-one-key-at-a-time implementation, without the 22-fold
    # repetition.
    processors = (
        reload_configfile,
        get_httpuser,
        get_tagged_comments,
        check_nagios_running,
        get_notifications,
        get_unhandled_problems,
        resolve_urlname,
        check_selinux,
        activate_plugins,
        check_destination_directory,
        check_nagios_cfg,
        get_current_time,
        get_okconfig,
        get_nagios_url,
        get_local_user,
        get_current_settings,
        get_plugins,
        get_current_version,
        get_serverside_includes,
        get_user_preferences,
        get_all_backends,
        get_all_nonworking_backends,
    )
    results = {}
    for processor in processors:
        # `or {}` tolerates a processor that returns None instead of a dict.
        results.update(processor(request) or {})
    return results
def get_current_time(request):
    """ Make current timestamp available to templates
    """
    result = {}
    try:
        result['current_time'] = datetime.datetime.now().strftime("%b %d %H:%M")
        result['current_timestamp'] = int(time.time())
    except Exception:
        # Best effort only -- return whatever was gathered so far.
        pass
    return result
def get_serverside_includes(request):
    """ Returns a list of serverside includes to include on this page.

    Looks in settings.serverside_includes for common-{header,footer}.ssi
    plus files named after the current urlname (e.g. index-header.ssi) and
    returns {'ssi_headers': [...], 'ssi_footers': [...]}. Returns {} when
    the feature is disabled or on any error.
    """
    result = {}
    try:
        result['ssi_headers'] = []
        result['ssi_footers'] = []
        dirname = adagios.settings.serverside_includes
        if not dirname:
            return {}
        if not os.path.isdir(dirname):
            return {}
        # BUGFIX: resolve_urlname() returns a dict like {'urlname': ...};
        # the old code formatted the dict itself into the filename, so the
        # per-page ssi files could never match.
        current_url = resolve_urlname(request).get('urlname')
        files = os.listdir(dirname)
        common_header_file = "common-header.ssi"
        common_footer_file = "common-footer.ssi"
        custom_header_file = "{urlname}-header.ssi".format(urlname=current_url)
        custom_footer_file = "{urlname}-footer.ssi".format(urlname=current_url)
        if common_header_file in files:
            result['ssi_headers'].append(dirname + "/" + common_header_file)
        if common_footer_file in files:
            result['ssi_footers'].append(dirname + "/" + common_footer_file)
        if custom_header_file in files:
            result['ssi_headers'].append(dirname + "/" + custom_header_file)
        if custom_footer_file in files:
            result['ssi_footers'].append(dirname + "/" + custom_footer_file)
    except Exception:
        return {}
    return result
def activate_plugins(request):
    """Register every plugin listed in settings.plugins and expose menu items."""
    for plugin_name, module_path in settings.plugins.items():
        add_plugin(name=plugin_name, modulepath=module_path)
    return {
        'misc_menubar_items': adagios.misc_menubar_items,
        'menubar_items': adagios.menubar_items,
    }
def get_local_user(request):
    """Return the OS account the adagios process runs as (e.g. under apache)."""
    return {'local_user': getpass.getuser()}
def get_current_version(request):
    """Expose the running adagios version string to templates."""
    return dict(adagios_version=__version__)
def get_current_settings(request):
    """Expose the adagios.settings module itself to templates."""
    return dict(settings=adagios.settings)
def resolve_urlname(request):
    """Return the Django urlname matched by this request.

    Makes {'urlname': ...} available inside templates.  Falls back to the
    string 'None' when the path cannot be resolved.
    """
    from django.core.urlresolvers import resolve
    try:
        res = resolve(request.path)
        if res:
            return {'urlname': res.url_name}
    except Exception:
        pass
    # Previously a falsy (non-raising) resolve() result fell off the end of
    # the function and returned None implicitly; always return a dict so
    # callers and templates can rely on the key being present.
    return {'urlname': 'None'}
def get_httpuser(request):
    """ Get the current user that is authenticating to us and update event handlers"""
    try:
        user = request.META.get('REMOTE_USER', None)
    except Exception:
        user = "anonymous"
    if not user:
        user = "anonymous"
    return {'remote_user': user}
def get_nagios_url(request):
    """Expose the URL of the legacy nagios web interface to templates."""
    return dict(nagios_url=settings.nagios_url)
def get_tagged_comments(request):
    """ (for status view) returns number of comments that mention the remote_user"""
    try:
        username = request.META.get('REMOTE_USER', 'anonymous')
        backend = adagios.status.utils.livestatus(request)
        count = backend.query(
            'GET comments', 'Stats: comment ~ %s' % username, columns=False)[0]
        if count > 0:
            return {'tagged_comments': count}
    except Exception:
        pass
    return {}
def get_unhandled_problems(request):
    """ Get number of any unhandled problems via livestatus """
    # Context processor: exports problem counters (num_problems,
    # num_unhandled_problems, num_service_problems_all/_unhandled,
    # num_host_problems_all/_unhandled, num_problems_all) to templates.
    results = {}
    try:
        livestatus = adagios.status.utils.livestatus(request)
        # Non-OK services that are neither acknowledged nor in downtime
        # (on both the service and its host); the two Stats lines yield
        # [count(state != 0), count(host_state != 0)].
        num_problems = livestatus.query('GET services',
                                        'Filter: state != 0',
                                        'Filter: acknowledged = 0',
                                        'Filter: host_acknowledged = 0',
                                        'Filter: scheduled_downtime_depth = 0',
                                        'Filter: host_scheduled_downtime_depth = 0',
                                        'Stats: state != 0',
                                        'Stats: host_state != 0',
                                        columns=False)
        results['num_problems'] = num_problems[0] + num_problems[1]
        results['num_unhandled_problems'] = num_problems[0] + num_problems[1]
        # result[0]: all service problems; result[1]: unhandled service
        # problems (StatsAnd folds the last four Stats lines together).
        result = livestatus.query('GET services',
                                  'Stats: state != 0',
                                  'Stats: state != 0',
                                  'Stats: acknowledged = 0',
                                  'Stats: scheduled_downtime_depth = 0',
                                  'Stats: host_state = 0',
                                  'StatsAnd: 4',
                                  columns=False
                                  )
        num_service_problems_all = result[0]
        num_service_problems_unhandled = result[1]
        # Same two counters for hosts.
        result = livestatus.query('GET hosts',
                                  'Stats: state != 0',
                                  'Stats: state != 0',
                                  'Stats: acknowledged = 0',
                                  'Stats: scheduled_downtime_depth = 0',
                                  'Stats: host_state = 1',
                                  'StatsAnd: 4',
                                  columns=False
                                  )
        num_host_problems_all = result[0]
        num_host_problems_unhandled = result[1]
        num_problems_all = num_service_problems_all + num_host_problems_all
        num_problems_unhandled = num_service_problems_unhandled + num_host_problems_unhandled
        num_problems = num_problems_unhandled
        # Deliberate trick: snapshot every local counter computed above into
        # the template context, then strip the non-template helpers.
        # Statement order in this function is therefore load-bearing.
        results = locals()
        del results['livestatus']
        del results['result']
        del results['request']
    except Exception:
        # Best effort: on any livestatus failure return whatever was
        # gathered so far (possibly only the first two counters, or nothing).
        pass
    return results
def check_nagios_cfg(request):
    """Expose the path of the active nagios.cfg to templates."""
    return dict(nagios_cfg=pynag.Model.config.cfg_file)
def check_destination_directory(request):
    """ Check that adagios has a place to store new objects """
    destination = settings.destination_directory
    # If there are problems with finding nagios.cfg, we don't
    # need to display any errors here regarding destination_directories
    try:
        Model.config.parse_maincfg()
    except Exception:
        return {}
    # The destination must appear as a cfg_dir entry in nagios.cfg.
    found = False
    for key, value in Model.config.maincfg_values:
        if key != 'cfg_dir':
            continue
        if os.path.normpath(value) == os.path.normpath(destination):
            found = True
    if not found:
        add_notification(level="warning", notification_id="dest_dir",
                         message=_("Destination for new objects (%s) is not defined in nagios.cfg") % destination)
    elif not os.path.isdir(destination):
        add_notification(level="warning", notification_id="dest_dir",
                         message=_("Destination directory for new objects (%s) is not found. Please create it.") % destination)
    else:
        clear_notification(notification_id="dest_dir")
    return {}
def check_nagios_running(request):
    """ Notify user if nagios is not running """
    try:
        if pynag.Model.config is None:
            pynag.Model.config = pynag.Parsers.config(
                adagios.settings.nagios_config)
        pid = pynag.Model.config._get_pid()
    except Exception:
        return {}
    return {"nagios_running": pid is not None}
def check_selinux(request):
    """ Check if selinux is enabled and notify user """
    notification_id = "selinux_active"
    if settings.warn_if_selinux_is_active:
        try:
            # Close the file handle deterministically; the original left it
            # to the garbage collector.
            with open('/sys/fs/selinux/enforce', 'r') as f:
                enforcing = f.readline().strip() == "1"
            if enforcing:
                add_notification(
                    level="warning",
                    message=_('SELinux is enabled, which is likely to give your monitoring engine problems., see <a href="https://access.redhat.com/knowledge/docs/en-US/Red_Hat_Enterprise_Linux/6/html-single/Security-Enhanced_Linux/index.html#sect-Security-Enhanced_Linux-Enabling_and_Disabling_SELinux-Disabling_SELinux">here</a> for information on how to disable it.'),
                    notification_id=notification_id,
                )
        except Exception:
            # /sys/fs/selinux/enforce missing simply means no selinux.
            pass
    else:
        clear_notification(notification_id)
    return {}
def get_notifications(request):
    """Expose the global adagios notifications map to templates."""
    return dict(notifications=notifications)
def get_okconfig(request):
    """ Returns {"okconfig":True} if okconfig module is installed.
    """
    try:
        return {"okconfig": True} if "okconfig" in settings.plugins else {}
    except Exception:
        return {}
def get_plugins(request):
    """Expose the configured adagios plugins to templates."""
    return dict(plugins=settings.plugins)
def reload_configfile(request):
    """ Load the configfile from settings.adagios_configfile and put its content in adagios.settings.

    On failure a warning notification (id "configfile") is raised instead of
    propagating the exception.
    """
    try:
        clear_notification("configfile")
        # Use a throwaway namespace for execfile; the original named this
        # variable "locals", shadowing the builtin.
        config_values = {}
        execfile(settings.adagios_configfile, globals(), config_values)
        for key, value in config_values.items():
            settings.__dict__[key] = value
    except Exception as e:
        add_notification(
            level="warning", message=str(e), notification_id="configfile")
    return {}
def get_user_preferences(request):
    """ Loads the preferences for the logged-in user. """
    def _theme_path(theme_name):
        # Resolve a theme name to its entry point inside the themes folder.
        return os.path.join(settings.THEMES_FOLDER,
                            theme_name,
                            settings.THEME_ENTRY_POINT)
    try:
        user = userdata.User(request)
        user.trigger_hooks()
        prefs = user.to_dict()
    except Exception:
        # Anonymous / broken userdata: fall back to the shipped defaults.
        prefs = adagios.settings.PREFS_DEFAULT
    prefs['theme_path'] = _theme_path(prefs.get('theme', 'default'))
    return {'user_data': prefs}
def get_all_backends(request):
    """Expose the list of configured livestatus backends to templates."""
    return {'backends': adagios.status.utils.get_all_backends()}
def get_all_nonworking_backends(request):
    """ Returns the backends which don't answer at the time. """
    broken = []
    for backend in get_all_backends(request)['backends']:
        if not Livestatus(backend).test(raise_error=False):
            broken.append(backend)
    return {'nonworking_backends': broken}
if __name__ == '__main__':
    # Smoke test: run every registered context processor without a real
    # request object.
    on_page_load(request=None)
########NEW FILE########
__FILENAME__ = forms
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from django.utils import unittest
from django.test.client import Client
import pynag.Parsers
import tempfile
import os
from adagios.contrib import get_template_name
import pynag.Utils
class ContribTests(unittest.TestCase):
    """Tests for adagios.contrib.get_template_name path resolution."""

    def setUp(self):
        # Fresh scratch directory for every test.
        base_path = tempfile.mkdtemp()
        self.base_path = base_path

    def tearDown(self):
        # Remove the scratch directory and everything under it.
        command = ['rm', '-rf', self.base_path]
        pynag.Utils.runCommand(command=command, shell=False)

    def testGetTemplateFilename(self):
        base_path = self.base_path
        file1 = base_path + '/file1'
        dir1 = base_path + '/dir1'
        file2 = dir1 + '/file2'
        # Close the handles deterministically; the originals were left open.
        with open(file1, 'w') as f:
            f.write('this is file1')
        os.mkdir(dir1)
        with open(file2, 'w') as f:
            f.write('this is file2')
        self.assertEqual(file1, get_template_name(base_path, 'file1'))
        self.assertEqual(file2, get_template_name(base_path, 'dir1', 'file2'))
        # Extra trailing arguments are ignored once the file is found.
        self.assertEqual(file2, get_template_name(base_path, 'dir1', 'file2', 'unneeded_argument'))
        # Try to return a filename that is outside base_path
        exception1 = lambda: get_template_name(base_path, '/etc/passwd')
        self.assertRaises(Exception, exception1)
        # Try to return a filename that is outside base_path
        exception2 = lambda: get_template_name(base_path, '/etc/', 'passwd')
        self.assertRaises(Exception, exception2)
        # Try to return a filename that is outside base_path
        exception3 = lambda: get_template_name(base_path, '..', 'passwd')
        self.assertRaises(Exception, exception3)
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
# URL routes for user-contributed views; path segments map to arg1..arg4.
urlpatterns = patterns('adagios',
                       (r'^/$', 'contrib.views.index'),
                       (r'^/(?P<arg1>.+)?$', 'contrib.views.contrib'),
                       (r'^/(?P<arg1>.+)/(?P<arg2>.+)/?$', 'contrib.views.contrib'),
                       # NOTE(review): the two patterns below have no '/'
                       # between arg1 and arg2 -- presumably a typo (greedy
                       # '.+' makes the split point arbitrary); confirm the
                       # intended path depth before relying on them.
                       (r'^/(?P<arg1>.+)(?P<arg2>.+)/(?P<arg3>.+)/?$', 'contrib.views.contrib'),
                       (r'^/(?P<arg1>.+)(?P<arg2>.+)/(?P<arg3>.+)/(?P<arg4>.+)/?$', 'contrib.views.contrib'),
                       )
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.context_processors import csrf
from django.shortcuts import render_to_response
from django.shortcuts import HttpResponse
import adagios.settings
import adagios.status.utils
import os
from adagios.views import adagios_decorator, error_page
from django.template import RequestContext
from adagios.contrib import get_template_name
from django import template
from django.utils.translation import ugettext as _
@adagios_decorator
def index(request, contrib_dir=None):
    """ List all available user contributed views in adagios.settings.contrib_dir """
    # NOTE: locals() is handed to the template, so the variable names
    # below (messages, errors, views, contrib_dir) are part of the contract.
    messages = []
    errors = []
    contrib_dir = contrib_dir or adagios.settings.contrib_dir
    views = os.listdir(contrib_dir)
    if not views:
        errors.append(_("Directory '%s' is empty") % contrib_dir)
    return render_to_response("contrib_index.html", locals(), context_instance=RequestContext(request))
@adagios_decorator
def contrib(request, arg1, arg2=None, arg3=None, arg4=None):
    """Render a user-contributed Django template located under
    adagios.settings.contrib_dir; directories fall back to the index view.
    """
    messages = []
    errors = []
    # get_template_name validates that the joined path stays inside contrib_dir.
    full_path = get_template_name(adagios.settings.contrib_dir, arg1, arg2, arg3, arg4)
    if os.path.isdir(full_path):
        return index(request, contrib_dir=full_path)
    with open(full_path) as f:
        content = f.read()
    # Lets populate local namespace with convenient data
    # NOTE(review): locals() inside a lambda refers to the lambda's own
    # empty namespace, so .get() always falls through to the second
    # argument -- these act as plain lazy fetchers, not caches; confirm
    # intent. Also service_problems calls get_hosts (not get_services),
    # which looks like a copy-paste slip -- verify.
    services = lambda: locals().get('services', adagios.status.utils.get_services(request))
    hosts = lambda: locals().get('hosts', adagios.status.utils.get_hosts(request))
    service_problems = lambda: locals().get('service_problems', adagios.status.utils.get_hosts(request, state__isnot='0'))
    host_problems = lambda: locals().get('host_problems', adagios.status.utils.get_hosts(request, state__isnot='0'))
    statistics = lambda: locals().get('statistics', adagios.status.utils.get_statistics(request))
    # Render the raw file as a Django template with the local namespace
    # (including the lazy fetchers above) as its context.
    t = template.Template(content)
    c = RequestContext(request, locals())
    html = t.render(c)
    return HttpResponse(html)
########NEW FILE########
__FILENAME__ = exceptions
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Exceptions that Adagios uses and raises
"""
class AdagiosError(Exception):
    """Root of the Adagios exception hierarchy; all custom errors derive from it."""
class AccessDenied(AdagiosError):
    """ This exception is raised whenever a user tries to access a page he does not have access to. """

    def __init__(self, username, access_required, message, path=None, *args, **kwargs):
        # Record who was denied, which permission was missing, and where.
        self.username = username
        self.path = path
        self.access_required = access_required
        self.message = message
        super(AccessDenied, self).__init__(message, *args, **kwargs)
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.encoding import smart_str
from django import forms
class AdagiosForm(forms.Form):
    """ Base class for all forms in this module. Forms that use pynag in any way should inherit from this one.
    """
    def clean(self):
        # Convert unicode keys/values into byte strings so pynag (which
        # works with str) can consume the cleaned data directly.
        data = super(AdagiosForm, self).clean()
        converted = {}
        for key, value in data.items():
            if isinstance(key, unicode):
                key = smart_str(key)
            if isinstance(value, unicode):
                value = smart_str(value)
            converted[key] = value
        return converted
########NEW FILE########
__FILENAME__ = manage
#!/usr/bin/python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Django 1.x manage.py bootstrap.
# NOTE(review): execute_manager was removed in Django 1.6 -- this stub
# presumably targets older Django; confirm before upgrading.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.core.mail import send_mail
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
import os.path
from adagios import settings
import adagios.utils
from pynag import Model, Control
from django.core.mail import EmailMultiAlternatives
import pynag.Parsers
import pynag.Control.Command
# Topics selectable in the "Contact us" feedback form.
TOPIC_CHOICES = (
    ('general', _('General Suggestion')),
    ('bug', _('I think i have found a bug')),
    ('suggestion', _('I have a particular task in mind that i would like to do with Adagios')),
    ('easier', _('I have an idea how make a certain task easier to do')),
)
# Verbosity levels for npcd logging (see PNPConfigForm.log_level).
pnp_loglevel_choices = [
    ('0', _('0 - Only Errors')),
    ('1', _('1 - Little logging')),
    ('2', _('2 - Log Everything')),
    ('-1', _('-1 Debug mode (log all and slower processing'))
]
# Destination choices for npcd logging.
pnp_log_type_choices = [('syslog', 'syslog'), ('file', 'file')]
# Actions offered for controlling the nagios daemon.
COMMAND_CHOICES = [('reload', 'reload'), ('status', 'status'),
                   ('restart', 'restart'), ('stop', 'stop'), ('start', 'start')]
# Example nagios object definitions pre-filled into the "paste" form.
initial_paste = """
define service {
host_name host01.example.com
service_description http://host01.example.com
use template-http
}
define service {
name template-http
check_command okc-check_http
}
"""
class ContactUsForm(forms.Form):
    """Feedback form that e-mails a suggestion to the adagios developers."""
    topic = forms.ChoiceField(choices=TOPIC_CHOICES)
    sender = forms.CharField(
        required=False,
        help_text=_("Optional email address if you want feedback from us"),
    )
    message = forms.CharField(
        widget=forms.widgets.Textarea(attrs={'rows': 15, 'cols': 40}),
        help_text=_("See below for examples of good suggestions"),
    )

    def save(self):
        """E-mail the submitted suggestion (raises on delivery failure)."""
        data = self.cleaned_data
        body = _("""
topic: %(topic)s
from: %(sender)s
%(message)s
""") % {'topic': data['topic'], 'sender': data['sender'], 'message': data['message']}
        send_mail(_("Suggestion from Adagios"), body,
                  'adagios@adagios.opensource.is', ["palli@ok.is"],
                  fail_silently=False)
class UserdataForm(forms.Form):
    """Per-user preference form: UI language, theme and page refresh rate."""
    # Preferred interface language; one of settings.LANGUAGES.
    language = forms.ChoiceField(
        choices=settings.LANGUAGES,
        required=False
    )
    # Theme name; choices are discovered from the themes folder on disk.
    theme = forms.ChoiceField(
        choices=[(x, x) for x in adagios.utils.get_available_themes()],
        required=False
    )
    refresh_rate = forms.IntegerField(
        help_text="For pages that auto-reload. Set the number of seconds to wait between page refreshes. "
                  "Set refresh rate to 0 to disable automatic refreshing.",
        required=False,
    )
class AdagiosSettingsForm(forms.Form):
    """Edit adagios' own configuration file (settings.adagios_configfile).

    The form is effectively always bound: __init__ injects the current
    values from adagios.settings when no data is supplied, and save()
    writes each cleaned value back to the config file.
    """
    nagios_config = forms.CharField(
        required=False, initial=settings.nagios_config,
        help_text=_("Path to nagios configuration file. i.e. /etc/nagios/nagios.cfg"))
    destination_directory = forms.CharField(
        required=False, initial=settings.destination_directory,
        help_text=_("Where to save new objects that adagios creates."))
    nagios_url = forms.CharField(
        required=False, initial=settings.nagios_url,
        help_text=_("URL (relative or absolute) to your nagios webcgi. Adagios will use this to make it simple to navigate from a configured host/service directly to the cgi."))
    nagios_init_script = forms.CharField(
        help_text=_("Path to you nagios init script. Adagios will use this when stopping/starting/reloading nagios"))
    nagios_binary = forms.CharField(
        help_text=_("Path to you nagios daemon binary. Adagios will use this to verify config with 'nagios -v nagios_config'"))
    livestatus_path = forms.CharField(
        help_text=_("Path to MK Livestatus socket. If left empty Adagios will try to autodiscover from your nagios.cfg"),
        required=False,
    )
    enable_githandler = forms.BooleanField(
        required=False, initial=settings.enable_githandler,
        help_text=_("If set. Adagios will commit any changes it makes to git repository."))
    enable_loghandler = forms.BooleanField(
        required=False, initial=settings.enable_loghandler,
        help_text=_("If set. Adagios will log any changes it makes to a file."))
    enable_authorization = forms.BooleanField(
        required=False, initial=settings.enable_authorization,
        help_text=_("If set. Users in Status view will only see hosts/services they are a contact for. Unset means everyone will see everything."))
    enable_status_view = forms.BooleanField(
        required=False, initial=settings.enable_status_view,
        help_text=_("If set. Enable status view which is an alternative to nagios legacy web interface. You will need to restart web server for the changes to take effect"))
    auto_reload = forms.BooleanField(
        required=False, initial=settings.auto_reload,
        help_text=_("If set. Nagios is reloaded automatically after every change."))
    warn_if_selinux_is_active = forms.BooleanField(
        required=False,
        help_text=_("Adagios does not play well with SElinux. So lets issue a warning if it is active. Only disable this if you know what you are doing."))
    pnp_filepath = forms.CharField(
        help_text=_("Full path to your pnp4nagios/index.php file. Adagios will use this to generate graphs"))
    pnp_url = forms.CharField(
        help_text=_("Full or relative url to pnp4nagios web interface, adagios can use this to link directly to pnp"))
    map_center = forms.CharField(
        help_text=_("Default coordinates when opening up the world map. This should be in the form of longitude,latitude"))
    map_zoom = forms.CharField(
        help_text=_("Default Zoom level when opening up the world map. 10 is a good default value"))
    language = forms.ChoiceField(choices=settings.LANGUAGES, required=False)
    theme = forms.ChoiceField(required=False, choices=[(x, x) for x in adagios.utils.get_available_themes()])
    refresh_rate = forms.IntegerField(
        help_text="For pages that auto-reload. Set the number of seconds to wait between page refreshes. "
                  "Set refresh rate to 0 to disable automatic refreshing."
    )
    enable_graphite = forms.BooleanField(required=False, help_text="If set. Include graphite graphs in status views")
    graphite_url = forms.CharField(help_text="Path to your graphite install.", required=False)
    graphite_querystring = forms.CharField(help_text="Querystring that is passed into graphite's /render method. {host} is replaced with respective hostname while {host_} will apply common graphite escaping. i.e. example.com -> example_com", required=False)
    graphite_title = forms.CharField(help_text="Use this title on all graphs coming from graphite", required=False)
    include = forms.CharField(
        required=False, help_text=_("Include configuration options from files matching this pattern"))

    def save(self):
        """Write every cleaned value into the adagios config file."""
        # First of all, if configfile does not exist, lets try to create it:
        if not os.path.isfile(settings.adagios_configfile):
            # Close the handle deterministically (was previously left open).
            with open(settings.adagios_configfile, 'w') as f:
                f.write(_("# Autocreated by adagios"))
        for k, v in self.cleaned_data.items():
            Model.config._edit_static_file(
                attribute=k, new_value=v, filename=settings.adagios_configfile)
        # Remember which file was written (hoisted out of the loop; the
        # assignment is loop-invariant).
        self.adagios_configfile = settings.adagios_configfile

    def __init__(self, *args, **kwargs):
        # Since this form is always bound, lets fetch current configfiles and
        # prepare them as post:
        if 'data' not in kwargs or kwargs['data'] == '':
            kwargs['data'] = settings.__dict__
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this form is ever subclassed.
        super(AdagiosSettingsForm, self).__init__(*args, **kwargs)

    def clean_pnp_filepath(self):
        filename = self.cleaned_data['pnp_filepath']
        return self.check_file_exists(filename)

    def clean_destination_directory(self):
        filename = self.cleaned_data['destination_directory']
        return self.check_file_exists(filename)

    def clean_nagios_init_script(self):
        filename = self.cleaned_data['nagios_init_script']
        # Allow "sudo /path/to/init"; validate only the script path itself.
        if filename.startswith('sudo'):
            self.check_file_exists(filename.split()[1])
        else:
            self.check_file_exists(filename)
        return filename

    def clean_nagios_binary(self):
        filename = self.cleaned_data['nagios_binary']
        return self.check_file_exists(filename)

    def clean_nagios_config(self):
        filename = self.cleaned_data['nagios_config']
        return self.check_file_exists(filename)

    def check_file_exists(self, filename):
        """ Raises validation error if filename does not exist """
        if not os.path.exists(filename):
            raise forms.ValidationError('No such file or directory')
        return filename

    def clean(self):
        cleaned_data = super(AdagiosSettingsForm, self).clean()
        for k, v in cleaned_data.items():
            # Convert all unicode to quoted strings
            if type(v) == type(u''):
                cleaned_data[k] = str('''"%s"''' % v)
            # Convert all booleans to True/False strings
            elif type(v) == type(False):
                cleaned_data[k] = str(v)
        return cleaned_data
class EditAllForm(forms.Form):
    """ This form intelligently modifies all attributes of a specific type.

    Builds one "modify_<id>" boolean field per object that defines
    `attribute` itself (or inherits nothing via `use`).
    """

    def __init__(self, object_type, attribute, new_value, *args, **kwargs):
        self.object_type = object_type
        self.attribute = attribute
        self.new_value = new_value
        # BUG FIX: the original called
        #   super(self.__class__, self).__init__(self, args, kwargs)
        # which passed the form itself as the "data" argument and handed the
        # arg/kwarg containers through unexpanded. Forward them properly.
        super(EditAllForm, self).__init__(*args, **kwargs)
        search_filter = {}
        search_filter['object_type'] = object_type
        search_filter['%s__isnot' % attribute] = new_value
        items = Model.ObjectDefinition.objects.filter(**search_filter)
        # Objects that merely inherit the attribute from a template are not
        # interesting; only those defining it locally (or using no template).
        interesting_objects = []
        for i in items:
            if attribute in i._defined_attributes or i.use is None:
                interesting_objects.append(i)
        self.interesting_objects = interesting_objects
        for i in interesting_objects:
            self.fields['modify_%s' % i.get_id()] = forms.BooleanField(
                required=False, initial=True)
class PNPActionUrlForm(forms.Form):
    """ This form handles applying action_url to bunch of hosts and services """
    action_url = forms.CharField(
        required=False, initial="/pnp4nagios/graph?host=$HOSTNAME$&srv=$SERVICEDESC$",
        help_text=_("Reset the action_url attribute of every service check in your nagios configuration with this one. "))

    def save(self):
        """Apply the configured action_url to every service that differs."""
        new_url = self.cleaned_data['action_url']
        candidates = Model.Service.objects.filter(action_url__isnot=new_url)
        self.total_services = len(candidates)
        self.error_services = 0
        for service in candidates:
            # Skip services that only inherit action_url from a template.
            if 'action_url' not in service._defined_attributes and service.use is not None:
                continue
            service.action_url = new_url
            try:
                service.save()
            except Exception:
                self.error_services += 1
class PNPTemplatesForm(forms.Form):
    """ This form manages your pnp4nagios templates """

    def __init__(self, *args, **kwargs):
        """Collect template directories and templates from /etc/pnp4nagios/config.php."""
        self.template_directories = []
        self.templates = []
        tmp = Model.config._load_static_file('/etc/pnp4nagios/config.php')
        for k, v in tmp:
            if k == "$conf['template_dirs'][]":
                # strip all ' and " from directory
                directory = v.strip(";").strip('"').strip("'")
                self.template_directories.append(directory)
                if os.path.isdir(directory):
                    for f in os.listdir(directory):
                        self.templates.append("%s/%s" % (directory, f))
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this form is ever subclassed.
        super(PNPTemplatesForm, self).__init__(*args, **kwargs)
class PNPConfigForm(forms.Form):
    """ This form handles the npcd.cfg configuration file """
    # BUG FIX: the user/group help texts were swapped in the original.
    user = forms.CharField(
        help_text=_("npcd service will have privileges of this user"))
    group = forms.CharField(
        help_text=_("npcd service will have privileges of this group"))
    log_type = forms.ChoiceField(
        widget=forms.RadioSelect, choices=pnp_log_type_choices, help_text=_("Define if you want to log to 'syslog' or 'file'"))
    log_file = forms.CharField(
        help_text=_("If log_type is set to file. Log to this file"))
    max_logfile_size = forms.IntegerField(
        help_text=_("Defines the maximum filesize (bytes) before logfile will rotate."))
    log_level = forms.ChoiceField(
        help_text=_("How much should we log?"), choices=pnp_loglevel_choices)
    perfdata_spool_dir = forms.CharField(
        help_text=_("where we can find the performance data files"))
    perfdata_file_run_cmd = forms.CharField(
        help_text=_("execute following command for each found file in perfdata_spool_dir"))
    perfdata_file_run_cmd_args = forms.CharField(
        required=False, help_text=_("optional arguments to perfdata_file_run_cmd"))
    identify_npcd = forms.ChoiceField(widget=forms.RadioSelect, choices=(
        ('1', 'Yes'), ('0', 'No')), help_text=_("If yes, npcd will append -n to the perfdata_file_run_cmd"))
    npcd_max_threads = forms.IntegerField(
        help_text=_("Define how many parallel threads we should start"))
    sleep_time = forms.IntegerField(
        help_text=_("How many seconds npcd should wait between dirscans"))
    load_threshold = forms.FloatField(
        help_text=_("npcd won't start if load is above this threshold"))
    pid_file = forms.CharField(help_text=_("Location of your pid file"))
    perfdata_file = forms.CharField(
        help_text=_("Where should npcdmod.o write the performance data. Must not be same directory as perfdata_spool_dir"))
    perfdata_spool_filename = forms.CharField(
        help_text=_("Filename for the spooled files"))
    perfdata_file_processing_interval = forms.IntegerField(
        help_text=_("Interval between file processing"))

    def __init__(self, initial=None, *args, **kwargs):
        """Populate initial values straight from npcd.cfg on disk.

        NOTE(review): the ``initial`` argument is accepted but never merged
        into ``my_initial`` -- caller-supplied overrides are silently
        discarded; confirm whether that is intended before changing it.
        """
        if not initial:
            initial = {}
        my_initial = {}
        # Lets use PNPBrokerModuleForm to find sensible path to npcd config
        # file
        broker_form = PNPBrokerModuleForm()
        self.npcd_cfg = broker_form.initial.get('config_file')
        npcd_values = Model.config._load_static_file(self.npcd_cfg)
        for k, v in npcd_values:
            my_initial[k] = v
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this form is ever subclassed.
        super(PNPConfigForm, self).__init__(initial=my_initial, *args, **kwargs)

    def save(self):
        """Write every changed field back to npcd.cfg."""
        for i in self.changed_data:
            Model.config._edit_static_file(
                attribute=i, new_value=self.cleaned_data[i], filename=self.npcd_cfg)
class EditFileForm(forms.Form):
    """ Manages editing of a single file """
    filecontent = forms.CharField(widget=forms.Textarea(
        attrs={'wrap': 'off', 'rows': '50', 'cols': '2000'}))

    def __init__(self, filename, initial=None, *args, **kwargs):
        if not initial:
            initial = {}
        self.filename = filename
        my_initial = initial.copy()
        if 'filecontent' not in my_initial:
            # Read current content; close the handle (was left to the GC).
            with open(filename) as f:
                my_initial['filecontent'] = f.read()
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this form is ever subclassed.
        super(EditFileForm, self).__init__(
            initial=my_initial, *args, **kwargs)

    def save(self):
        """Overwrite the file with the edited content if it changed."""
        if 'filecontent' in self.changed_data:
            data = self.cleaned_data['filecontent']
            # Close the handle deterministically so the write is flushed.
            with open(self.filename, 'w') as f:
                f.write(data)
class PNPBrokerModuleForm(forms.Form):
    """ This form is responsible for configuring PNP4Nagios.

    It reads and edits the broker_module / event_broker_options /
    process_performance_data attributes of the running nagios.cfg so that
    nagios loads pnp4nagios' npcdmod.o broker module.
    """
    #enable_pnp= forms.BooleanField(required=False, initial=True,help_text="If set, PNP will be enabled and will graph Nagios Performance Data.")
    broker_module = forms.CharField(
        help_text=_("Full path to your npcdmod.o broker module that shipped with your pnp4nagios installation"))
    config_file = forms.CharField(
        help_text=_("Full path to your npcd.cfg that shipped with your pnp4nagios installation"))
    event_broker_options = forms.IntegerField(
        initial="-1", help_text=_("Nagios's default of -1 is recommended here. PNP Documentation says you will need at least bits 2 and 3. Only change this if you know what you are doing."))
    process_performance_data = forms.BooleanField(
        required=False, initial=True, help_text=_("PNP Needs the nagios option process_performance_data enabled to function. Make sure it is enabled."))
    #apply_action_url = forms.BooleanField(required=False,initial=True,help_text="If set, apply action_url to every service object in nagios")
    #action_url=forms.CharField(required=False,initial="/pnp4nagios/graph?host=$HOSTNAME$&srv=$SERVICEDESC$", help_text="Action url that your nagios objects can use to access perfdata")

    def clean_broker_module(self):
        """ Raises validation error if filename does not exist """
        filename = self.cleaned_data['broker_module']
        if not os.path.exists(filename):
            raise forms.ValidationError('File not found')
        return filename

    def clean_config_file(self):
        """ Raises validation error if filename does not exist """
        filename = self.cleaned_data['config_file']
        if not os.path.exists(filename):
            raise forms.ValidationError('File not found')
        return filename

    def __init__(self, initial=None, *args, **kwargs):
        """ Pre-populate the form from the running nagios.cfg.

        Looks for an existing npcdmod.o broker_module line; if none is
        found, sensible default paths are suggested instead.
        """
        if not initial:
            initial = {}
        my_initial = {}
        Model.config.parse()
        # (an unused local copy of Model.config.maincfg_values was removed here)
        self.nagios_configline = None
        for k, v in Model.config.maincfg_values:
            if k == 'broker_module' and v.find('npcdmod.o') > 0:
                # Remember the raw line so save() can replace it in place.
                self.nagios_configline = v
                v = v.split()
                my_initial['broker_module'] = v.pop(0)
                for i in v:
                    if i.find('config_file=') > -1:
                        my_initial['config_file'] = i.split('=', 1)[1]
            elif k == "event_broker_options":
                my_initial[k] = v
        # If view specified any initial values, they overwrite ours
        for k, v in initial.items():
            my_initial[k] = v
        if 'broker_module' not in my_initial:
            my_initial['broker_module'] = self.get_suggested_npcdmod_path()
        if 'config_file' not in my_initial:
            my_initial['config_file'] = self.get_suggested_npcd_path()
        # BUG FIX: super(self.__class__, ...) recurses forever if this class
        # is ever subclassed; name the class explicitly.
        super(PNPBrokerModuleForm, self).__init__(
            initial=my_initial, *args, **kwargs)

    def get_suggested_npcdmod_path(self):
        """ Returns best guess for full path to npcdmod.o file """
        possible_locations = [
            "/usr/lib/pnp4nagios/npcdmod.o",
            "/usr/lib64/nagios/brokers/npcdmod.o",
        ]
        for i in possible_locations:
            if os.path.isfile(i):
                return i
        return possible_locations[-1]

    def get_suggested_npcd_path(self):
        """ Returns best guess for full path to npcd.cfg file """
        possible_locations = [
            "/etc/pnp4nagios/npcd.cfg"
        ]
        for i in possible_locations:
            if os.path.isfile(i):
                return i
        return possible_locations[-1]

    def save(self):
        """ Write broker_module, process_performance_data and
        event_broker_options back to nagios.cfg. """
        if 'broker_module' in self.changed_data or 'config_file' in self.changed_data or self.nagios_configline is None:
            v = "%s config_file=%s" % (
                self.cleaned_data['broker_module'], self.cleaned_data['config_file'])
            Model.config._edit_static_file(
                attribute="broker_module", new_value=v, old_value=self.nagios_configline, append=True)
        # We are supposed to handle process_performance_data attribute.. lets
        # do that here
        process_performance_data = "1" if self.cleaned_data[
            'process_performance_data'] else "0"
        Model.config._edit_static_file(
            attribute="process_performance_data", new_value=process_performance_data)
        # Update event broker only if it has changed
        name = "event_broker_options"
        if name in self.changed_data:
            Model.config._edit_static_file(
                attribute=name, new_value=self.cleaned_data[name])
class PluginOutputForm(forms.Form):
    """ Accepts raw nagios plugin output and parses it with pynag. """
    plugin_output = forms.CharField(
        widget=forms.Textarea(attrs={'wrap': 'off', 'cols': '80'}))

    def parse(self):
        """ Parse cleaned plugin_output; result is stored in self.results. """
        from pynag import Utils
        self.results = Utils.PluginOutput(self.cleaned_data['plugin_output'])
class NagiosServiceForm(forms.Form):
    """ Maintains control of the nagios service / reload / restart / etc """

    def save(self):
        """ Run the command named in the submitted form data.

        Recognized commands: reload, restart, stop, start, status, verify.
        Stores stdout/stderr/exit_code of the command on self.
        Raises Exception for an unknown command.
        """
        if "reload" in self.data:
            command = "reload"
        elif "restart" in self.data:
            command = "restart"
        elif "stop" in self.data:
            command = "stop"
        elif "start" in self.data:
            command = "start"
        elif "status" in self.data:
            command = "status"
        elif "verify" in self.data:
            command = "verify"
        else:
            raise Exception(_("Unknown command"))
        self.command = command
        nagios_init = settings.nagios_init_script
        nagios_binary = settings.nagios_binary
        nagios_config = settings.nagios_config or pynag.Model.config.cfg_file
        if command == "verify":
            # "verify" runs the binary's config check, not the init script.
            command = "%s -v '%s'" % (nagios_binary, nagios_config)
        else:
            command = "%s %s" % (nagios_init, command)
        code, stdout, stderr = pynag.Utils.runCommand(command)
        self.stdout = stdout or ""
        self.stderr = stderr or ""
        self.exit_code = code

    def verify(self):
        """ Run "nagios -v nagios.cfg" and returns errors/warning
        Returns:
        [
        {'errors': []},
        {'warnings': []}
        ]
        """
        nagios_binary = settings.nagios_binary
        nagios_config = settings.nagios_config
        command = "%s -v '%s'" % (nagios_binary, nagios_config)
        code, stdout, stderr = pynag.Utils.runCommand(command)
        self.stdout = stdout or None
        self.stderr = stderr or None
        self.exit_code = code
        # BUG FIX: accumulators used to be re-created on every iteration and
        # 'line.lower' was never called (missing parentheses), so nothing
        # was ever collected.  Collect matching lines and return them in the
        # structure promised by the docstring.
        warnings = []
        errors = []
        for line in stdout.splitlines():
            line = line.strip()
            if line.lower().startswith('warning:'):
                warnings.append(line)
            elif line.lower().startswith('error:'):
                errors.append(line)
        return [{'errors': errors}, {'warnings': warnings}]
class SendEmailForm(forms.Form):
    """ Form used to send email to one or more contacts regarding particular services
    """
    to = forms.CharField(
        required=True,
        help_text=_("E-mail address"),
    )
    message = forms.CharField(
        widget=forms.widgets.Textarea(attrs={'rows': 15, 'cols': 40}),
        required=False,
        help_text=_("Message that is to be sent to recipients"),
    )
    add_myself_to_cc = forms.BooleanField(
        required=False,
        help_text=_("If checked, you will be added automatically to CC")
    )
    acknowledge_all_problems = forms.BooleanField(
        required=False,
        help_text=_("If checked, also acknowledge all problems as they are sent")
    )

    def __init__(self, remote_user, *args, **kwargs):
        """ Create a new instance of SendEmailForm, contact name and email is used as from address.
        """
        self.remote_user = remote_user
        self.html_content = _("There is now HTML content with this message.")
        # Views populate these before calling save(); they drive the
        # acknowledge_all_* helpers below.
        self.services = []
        self.hosts = []
        self.status_objects = []
        self._resolve_remote_user(self.remote_user)
        # BUG FIX: super(self.__class__, ...) breaks if this class is ever
        # subclassed; name the class explicitly.
        super(SendEmailForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Send the email, optionally CC-ing the sender and acknowledging
        all attached host/service problems. """
        # BUG FIX: subject used to read "a a message".
        subject = _("%s sent you a message through adagios") % self.remote_user
        cc_address = []
        from_address = self._resolve_remote_user(self.remote_user)
        # Check if _resolve_remote_user did in fact return an email address - avoid SMTPSenderRefused.
        import re  # re built in Py1.5+
        # BUG FIX: raw string so the regex backslashes are not interpreted
        # as (invalid) Python string escapes.
        if re.compile(r'([\w\-\.]+@(\w[\w\-]+\.)+[\w\-]+)').search(from_address) is None:
            from_address = str(from_address) + '@no.domain'
        to_address = self.cleaned_data['to']
        to_address = to_address.split(',')
        text_content = self.cleaned_data['message']
        text_content = text_content.replace('\n', '<br>')
        # self.html_content is rendered in misc.views.mail()
        html_content = text_content + "<p></p>" + self.html_content
        if self.cleaned_data['add_myself_to_cc']:
            cc_address.append(from_address)
        if self.cleaned_data['acknowledge_all_problems']:
            comment = _("Sent mail to %s") % self.cleaned_data['to']
            self.acknowledge_all_services(comment)
            self.acknowledge_all_hosts(comment)
        # Here we actually send some email:
        msg = EmailMultiAlternatives(
            subject=subject, body=text_content, from_email=from_address, cc=cc_address, to=to_address)
        msg.attach_alternative(html_content, "text/html")
        msg.send()

    def acknowledge_all_hosts(self, comment):
        """ Acknowledge all problems in self.hosts
        """
        for i in self.hosts:
            host_name = i.get('host_name')
            sticky = "1"
            persistent = "0"
            notify = "0"
            author = self.remote_user
            pynag.Control.Command.acknowledge_host_problem(host_name=host_name,
                                                           sticky=sticky,
                                                           persistent=persistent,
                                                           notify=notify,
                                                           author=author,
                                                           comment=comment)

    def acknowledge_all_services(self, comment):
        """ Acknowledge all problems in self.services
        """
        for i in self.services:
            host_name = i.get('host_name')
            service_description = i.get('description')
            sticky = "1"
            persistent = "0"
            notify = "0"
            author = self.remote_user
            pynag.Control.Command.acknowledge_svc_problem(host_name=host_name,
                                                          service_description=service_description,
                                                          sticky=sticky,
                                                          persistent=persistent,
                                                          notify=notify,
                                                          author=author,
                                                          comment=comment)

    def _resolve_remote_user(self, username):
        """ Returns a valid "Full Name <email@example.com>" for remote http authenticated user.
        If Remote user is a nagios contact, then return: Contact_Alias <contact_email>"
        Else if remote user is a valid email address, return that address
        Else return None
        """
        import adagios.status.utils
        livestatus = adagios.status.utils.livestatus(request=None)
        try:
            contact = livestatus.get_contact(username)
            return "%s <%s>" % (contact.get('alias'), contact.get('email'))
        except IndexError:
            # If we get here, then remote_user does not exist as a contact.
            return username
class PasteForm(forms.Form):
    """ Accepts a raw nagios configuration snippet and parses it into pynag
    object definitions. """
    paste = forms.CharField(initial=initial_paste, widget=forms.Textarea())

    def parse(self):
        """ Parse self.cleaned_data['paste'] into ObjectDefinitions.

        Side effects: stores the parser on self.config, the raw lines on
        self.pasted_string and the parsed definitions on self.objects.
        """
        c = pynag.Parsers.config()
        self.config = c
        c.reset()
        paste = self.cleaned_data['paste']
        # Also convert raw paste into a string so we can display errors at the
        # right place:
        self.pasted_string = paste.splitlines()
        # Drive pynag's parser internals by hand; the order matters:
        # parse_string -> pre_object_list -> _post_parse() populates c.data.
        items = c.parse_string(paste)
        c.pre_object_list = items
        c._post_parse()
        all_objects = []
        for object_type, objects in c.data.items():
            # NOTE(review): 'model' is computed but never used -- confirm it
            # can be removed.
            model = pynag.Model.string_to_class.get(
                object_type, pynag.Model.ObjectDefinition)
            for i in objects:
                # Wrap each raw parsed item in its concrete Model class.
                Class = pynag.Model.string_to_class.get(
                    i['meta']['object_type'])
                my_object = Class(item=i)
                all_objects.append(my_object)
        self.objects = all_objects
########NEW FILE########
__FILENAME__ = helpers
#!/usr/bin/python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Convenient stateless functions for pynag. This module is used by the /rest/ interface of adagios.
"""
import platform
import re
from pynag import Model
from pynag import Parsers
from pynag import Control
from pynag import Utils
from pynag import __version__
from socket import gethostbyname_ex
import adagios.settings
from django.utils.translation import ugettext as _
#_config = Parsers.config(adagios.settings.nagios_config)
#_config.parse()
version = __version__
def _get_dict(x):
x.__delattr__('objects')
return x._original_attributes
def get_objects(object_type=None, with_fields="id,shortname,object_type", **kwargs):
    """ Get any type of object definition in a dict-compatible fashion.

    Arguments:
        object_type (optional) -- Return objects of this type
        with_fields (optional) -- comma separated list of attributes to show
                                  (default=id,shortname,object_type)
        Any other argument is passed on as a filter to pynag.
    Examples:
        # show all active hosts and their ip address
        get_objects(object_type="host", register="1", with_fields="host_name,address")
        # show all attributes of all services
        get_objects(object_type="service", with_fields='*')
    Returns:
        List of dicts, one per matching ObjectDefinition.
    """
    matches = Model.ObjectDefinition.objects.filter(
        object_type=object_type, **kwargs)
    fields = with_fields.split(',')
    return map(lambda definition: object_to_dict(definition, attributes=fields), matches)
def servicestatus(with_fields="host_name,service_description,current_state,plugin_output"):
    """ Returns a list of all active services and their current status """
    status = Parsers.status()
    status.parse()
    fields = with_fields.split(',')
    result_list = []
    for service in status.data['servicestatus']:
        # '*' means "all fields"; otherwise keep only the requested ones.
        entry = dict((key, value) for key, value in service.items()
                     if fields == ['*'] or key in fields)
        result_list.append(entry)
    return result_list
def object_to_dict(object, attributes="id,shortname,object_type"):
    """ Convert one object definition into a plain dict.

    Arguments:
        object     -- the object definition (supports item access by key)
        attributes -- '*' (or empty) for all raw attributes, otherwise a
                      comma separated string or a list of attribute names
    Returns:
        dict mapping each requested attribute name to its value.
    """
    if not attributes or attributes == '*':
        return object._original_attributes
    if not isinstance(attributes, list):
        attributes = attributes.split(',')
    return dict((attr, object[attr]) for attr in attributes)
def get_object(id, with_fields="id,shortname,object_type"):
    """ Return one specific ObjectDefinition as a dict of the requested fields. """
    definition = Model.ObjectDefinition.objects.get_by_id(id)
    return object_to_dict(definition, attributes=with_fields)
def delete_object(object_id, recursive=False, cleanup_related_items=True):
    """ Delete one specific ObjectDefinition.

    Arguments:
        object_id -- The pynag id of the definition you want to delete
        cleanup_related_items -- If True, clean up references to this object
                                 in other definitions
        recursive -- If True, also remove other objects that depend on this
                     one (e.g. deleting a host also deletes its services)
    Returns:
        True on success. Exceptions propagate on failure.
    """
    definition = Model.ObjectDefinition.objects.get_by_id(object_id)
    definition.delete(recursive=recursive, cleanup_related_items=cleanup_related_items)
    return True
def get_host_names(invalidate_cache=False):
    """ Return a sorted list of all configured host_name values.

    Arguments:
        invalidate_cache -- not implemented; passing True raises
                            NotImplementedError.
    """
    if invalidate_cache is True:
        raise NotImplementedError()
    # Comprehension replaces the manual append loop; 'is not None' replaces
    # the awkward 'not x is None' form.
    return sorted(host['host_name']
                  for host in Model.Host.objects.all
                  if host['host_name'] is not None)
def change_attribute(id, attribute_name, new_value):
    """ Set one attribute on the object with the given id and save it.

    Arguments:
        id             -- object_id of the definition to be saved
        attribute_name -- name of the attribute (i.e. "host_name")
        new_value      -- new value (i.e. "host.example.com")
    """
    definition = Model.ObjectDefinition.objects.get_by_id(id)
    definition[attribute_name] = new_value
    definition.save()
def change_service_attribute(identifier, new_value):
"""
Change one service that is identified in the form of:
host_name::service_description::attribute_name
Examples:
>>> change_service_attribute("localhost::Ping::service_description", "Ping2")
Returns:
True on success,
Raises:
Exception on error
"""
tmp = identifier.split('::')
if len(tmp) != 3:
raise ValueError(
_("identifier must be in the form of host_name::service_description::attribute_name (got %s)") % identifier)
host_name, service_description, attribute_name = tmp
try:
service = Model.Service.objects.get_by_shortname(
"%s/%s" % (host_name, service_description))
except KeyError, e:
raise KeyError(_("Could not find service %s") % e)
service[attribute_name] = new_value
service.save()
return True
def copy_object(object_id, recursive=False, **kwargs):
    """ Copy one objectdefinition.

    Arguments:
        object_id -- id of the object to be copied
        recursive -- If True, also copy related child objects
        **kwargs  -- Any other argument will be treated as an attribute
                     to change on the new object
    Returns:
        A localized "Object successfully copied to <filename>" message.
    Examples:
        copy_object(1234567890, host_name=new_hostname)
    """
    original = Model.ObjectDefinition.objects.get_by_id(object_id)
    clone = original.copy(recursive=recursive, **kwargs)
    return _("Object successfully copied to %s") % clone.get_filename()
def run_check_command(object_id):
    """ Runs the check_command for one specified object.

    Arguments:
        object_id -- object_id of the definition (i.e. host or service)
    Returns:
        [return_code, stdout, stderr]
    """
    if platform.node() == 'adagios.opensource.is':
        # BUG FIX: this branch used to return a 2-tuple, breaking callers
        # that unpack the documented 3-tuple.
        return 1, _('Running check commands is disabled in demo-environment'), ''
    o = Model.ObjectDefinition.objects.get_by_id(object_id)
    return o.run_check_command()
def set_maincfg_attribute(attribute, new_value, old_value='None', append=False):
    """ Sets specific configuration values of nagios.cfg.

    Required Arguments:
        attribute -- Attribute to change (i.e. process_performance_data)
        new_value -- New value for the attribute (i.e. "1")
    Optional Arguments:
        old_value -- Specify this to change one specific occurrence
        append    -- Set to 'True' to append a new configuration attribute
    Returns:
        True  -- If any changes were made
        False -- If no changes were made
    """
    filename = Model.config.cfg_file
    # The REST layer passes every argument as a string, so the markers
    # 'None'/'True'/'False' must be converted back to Python values.
    # BUG FIX: str() also guards against real bools/None being passed in --
    # the original crashed with AttributeError on the default append=False
    # because it called .lower() directly on the bool.
    if str(old_value).lower() == 'none':
        old_value = None
    if str(new_value).lower() == 'none':
        new_value = None
    if str(filename).lower() == 'none':
        filename = None
    append_marker = str(append).lower()
    if append_marker == 'false':
        append = False
    elif append_marker == 'true':
        append = True
    elif append_marker == 'none':
        append = None
    return Model.config._edit_static_file(attribute=attribute, new_value=new_value, old_value=old_value, filename=filename, append=append)
def reload_nagios():
    """ Reload nagios and report the outcome.

    Returns a dict with 'status' ("success"/"error") and a human readable
    'message'.
    """
    daemon = Control.daemon(
        nagios_cfg=Model.config.cfg_file,
        nagios_init=adagios.settings.nagios_init_script,
        nagios_bin=adagios.settings.nagios_binary
    )
    if daemon.reload() == 0:
        return {'status': _("success"),
                'message': _('Nagios Successfully reloaded')}
    return {'status': _("error"),
            'message': _("Failed to reload nagios (do you have enough permissions?)")}
def needs_reload():
    """ Returns True if Nagios server needs to reload configuration.

    Delegates the decision to pynag's Model.config.needs_reload().
    """
    return Model.config.needs_reload()
def dnslookup(host_name):
    """ Resolve host_name via DNS.

    Returns a dict with 'host', 'aliaslist' and 'addresslist' on success,
    or {'error': <message>} on any failure (best-effort; never raises).
    """
    try:
        (name, aliaslist, addresslist) = gethostbyname_ex(host_name)
        return {'host': name, 'aliaslist': aliaslist, 'addresslist': addresslist}
    except Exception, e:
        return {'error': str(e)}
def contactgroup_hierarchy(**kwargs):
    """ Return contactgroup rows for a hierarchy widget.

    Each row is [display_dict, parent_group_names, str(group)]; the 'v'/'f'
    keys in display_dict presumably follow a Google-Charts-style orgchart
    format -- confirm against the consuming view.
    On failure a dict {'error': message} is returned instead.
    """
    result = []
    try:
        groups = Model.Contactgroup.objects.all
        for i in groups:
            display = {}
            display['v'] = i.contactgroup_name
            # TODO: the contact count is hard-coded to 0 here.
            display['f'] = '%s<div style="color:green; font-style:italic">%s contacts</div>' % (
                i.contactgroup_name, 0)
            arr = [display, i.contactgroup_members or '', str(i)]
            result.append(arr)
        return result
    except Exception, e:
        return {'error': str(e)}
def add_object(object_type, filename=None, **kwargs):
    """ Create one specific object definition and store it in nagios.

    Arguments:
        object_type -- What kind of object to create (host, service,
                       contactgroup, etc)
        filename    -- Which configuration file to store the object in.
                       If filename=None pynag will decide where to store it
        **kwargs    -- Any other arguments will be treated as attributes for
                       the new object definition
    Returns:
        {'filename': XXX, 'raw_definition': XXX}
    Examples:
        add_object(object_type=host, host_name="localhost.example", address="127.0.0.1", use="generic-host")
    """
    new_object = Model.string_to_class.get(object_type)()
    if filename is not None:
        new_object.set_filename(filename)
    for attribute, value in kwargs.items():
        new_object[attribute] = value
    new_object.save()
    return {"filename": new_object.get_filename(), "raw_definition": str(new_object)}
def check_command(host_name, service_description, name=None, check_command=None, **kwargs):
    """ Returns all macros of a given service/host

    Arguments:
        host_name -- Name of host
        service_description -- Service description
        check_command -- Name of check command

    NOTE(review): **kwargs is documented as "arguments or custom macros that
    will be changed on-the-fly" but is never used in the body -- confirm.

    Returns:
        dict similar to the following:
        { 'host_name': ...,
          'service_description': ...,
          'check_command': ...,
          '$ARG1$': ...,
          '$SERVICE_MACROx$': ...,
        }
    """
    # Decide which object we are inspecting: a service by name, a host, or
    # a host/service pair.
    if host_name in ('None', None, ''):
        my_object = Model.Service.objects.get_by_name(name)
    elif service_description in ('None', None, '', u''):
        my_object = Model.Host.objects.get_by_shortname(host_name)
    else:
        short_name = "%s/%s" % (host_name, service_description)
        my_object = Model.Service.objects.get_by_shortname(short_name)
    if check_command in (None, '', 'None'):
        command = my_object.get_effective_check_command()
    else:
        command = Model.Command.objects.get_by_shortname(check_command)
    # Lets put all our results in a nice little dict
    macros = {}
    # Temporarily force pynag to serve objects from cache only; the previous
    # value is restored in the finally block below.
    cache = Model.ObjectFetcher._cache_only
    try:
        Model.ObjectFetcher._cache_only = True
        macros['check_command'] = command.command_name
        macros['original_command_line'] = command.command_line
        macros['effective_command_line'] = my_object.get_effective_command_line()
        # Lets get all macros that this check command defines:
        regex = re.compile("(\$\w+\$)")
        macronames = regex.findall(command.command_line)
        for i in macronames:
            macros[i] = my_object.get_macro(i) or ''
        if not check_command:
            # Argument macros are special (ARGX), lets display those as is, without resolving it to the fullest
            ARGs = my_object.check_command.split('!')
            for i, arg in enumerate(ARGs):
                if i == 0:
                    # The first '!'-separated token is the command name itself.
                    continue
                macronames = regex.findall(arg)
                for m in macronames:
                    macros[m] = my_object.get_macro(m) or ''
                macros['$ARG{i}$'.format(i=i)] = arg
    finally:
        Model.ObjectFetcher._cache_only = cache
    return macros
def verify_configuration():
    """ Verifies nagios configuration.

    Runs "nagios -v nagios.cfg" and returns a dict with 'return_code',
    'output' (stdout) and 'errors' (stderr).
    """
    binary = adagios.settings.nagios_binary
    config = adagios.settings.nagios_config
    command = "%s -v '%s'" % (binary, config)
    exit_code, stdout, stderr = Utils.runCommand(command)
    return {
        'return_code': exit_code,
        'output': stdout,
        'errors': stderr,
    }
def get_object_statistics():
    """ Returns a list of all object_types with total number of configured objects.

    Example result:
        [
            {"object_type": "host", "total": 50},
            {"object_type": "service", "total": 50},
        ]
    """
    Model.ObjectDefinition.objects.reload_cache()
    return [{"object_type": object_type, "total": len(instances)}
            for object_type, instances in Model.ObjectFetcher._cached_object_type.items()]
def autocomplete(q):
    """ Return {'hosts': [], 'hostgroups': [], 'services': []} matching
    search query q. A q of None is treated as the empty string. """
    if q is None:
        q = ''
    hosts = Model.Host.objects.filter(host_name__contains=q)
    services = Model.Service.objects.filter(service_description__contains=q)
    hostgroups = Model.Hostgroup.objects.filter(hostgroup_name__contains=q)
    # Sets deduplicate names; sorted() turns them back into stable lists.
    return {
        'hosts': sorted({h.host_name for h in hosts}),
        'hostgroups': sorted({g.hostgroup_name for g in hostgroups}),
        'services': sorted({s.service_description for s in services}),
    }
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
class TestModel(models.Model):
    # NOTE(review): appears to be a scratch model for exercising the database
    # setup -- confirm before removing.
    testField = models.CharField(max_length=100)
    testField2 = models.CharField(max_length=100)
class BusinessProcess(models.Model):
    """ A named business process that may contain other processes. """
    # Self-referential many-to-many: a process can contain sub-processes.
    processes = models.ManyToManyField("self", unique=False, blank=True)
    name = models.CharField(max_length=100, unique=True)
    display_name = models.CharField(max_length=100, blank=True)
    notes = models.CharField(max_length=1000, blank=True)
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
class Graph(models.Model):
    """ Identifies a single metric by host, service and metric name. """
    host_name = models.CharField(max_length=100)
    service_description = models.CharField(max_length=100)
    metric_name = models.CharField(max_length=100)
########NEW FILE########
__FILENAME__ = rest
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2012, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This is a rest interface used by the "/rest/" module that affects adagios directly.
"""
from adagios import __version__, notifications, tasks
from adagios.settings import plugins
from adagios import userdata
from django.utils.translation import ugettext as _
version = __version__
def add_notification(level="info", message="message", notification_id=None, notification_type=None, user=None):
    """ Add a new notification to adagios notification bar.

    Arguments:
        level -- pick "info" "success" "error" "danger"
        message -- Arbitary text message,
        notification_id (optional) -- Use this if you want to remote
                -- remove this notification later via clear_notification()
        notification_type -- Valid options: "generic" and "show_once"
        user -- If specified, only display notification for this specific user.
    Returns:
        None
    Examples:
        >>> add_notification(level="warning", message="Nagios needs to reload")
    """
    if not notification_id:
        # No explicit id: derive one from the message so identical messages
        # overwrite each other instead of stacking up.
        notification_id = str(message.__hash__())
    if not notification_type:
        notification_type = "generic"
    # NOTE: locals() deliberately captures all the arguments (level, message,
    # notification_id, notification_type, user) as the notification dict --
    # do not introduce new local variables above this line, they would leak
    # into the stored notification.
    notification = locals()
    notifications[notification_id] = notification
def clear_notification(notification_id):
    """ Clear one notification from adagios notification panel.

    Returns "success" when removed, "not found" otherwise.
    """
    if notification_id not in notifications:
        return "not found"
    del notifications[notification_id]
    return "success"
def get_notifications(request):
    """ Return all current notifications relevant to the requesting user.

    Notifications addressed to a different user are skipped; 'show_once'
    notifications are returned and then deleted.
    """
    result = []
    # BUG FIX: entries are deleted from the dict during iteration. Python 2's
    # dict.keys() happens to return a copy, but list() makes the snapshot
    # explicit and keeps this safe under Python 3 as well.
    for k in list(notifications.keys()):
        i = notifications[k]
        if i.get('user') and i.get('user') != request.META.get('remote_user'):
            continue  # Skip this message if it is meant for someone else
        elif i.get('notification_type') == 'show_once':
            # Deliver it this one time, then forget it.
            del notifications[k]
        result.append(i)
    return result
def clear_all_notifications():
    """ Removes all notifications from adagios notification panel """
    # Empty the module-level notifications dict in place.
    notifications.clear()
    return "all notifications cleared"
def list_tasks():
    """ Return the id and status of every known background task. """
    return [{'task_id': task.get_id(), 'task_status': task.status()}
            for task in tasks]
def get_task(task_id="someid"):
    """ Return information about one specific background task.

    Raises:
        KeyError if no task with the given id exists.
    """
    for task in tasks:
        # BUG FIX: the original compared str(task.get_id) -- the bound method
        # itself, not its result -- and the trailing 'or task_id' made the
        # condition true for any non-empty task_id, so the first task was
        # always returned.
        if str(task.get_id()) == str(task_id):
            return {
                'task_id': task.get_id(),
                'task_status': task.status()
            }
    raise KeyError(_("Task not '%s' Found") % task_id)
def get_user_preferences(request):
    """ Return the requesting user's stored preferences as a dict. """
    # The original wrapped this in "try: ... except Exception as e: raise e",
    # which does nothing useful and discards the traceback on Python 2;
    # just let exceptions propagate.
    user = userdata.User(request)
    return user.to_dict()
def set_user_preference(request, **kwargs):
    """ Persist the given preference key/value pairs for the requesting user.

    Keys starting with '_' are ignored.
    """
    # No-op try/except removed (it only re-raised and lost the traceback).
    user = userdata.User(request)
    # .items() instead of .iteritems(): identical behavior on Python 2 and
    # keeps this function Python 3 compatible.
    for (k, v) in kwargs.items():
        if not k.startswith('_'):
            user.set_pref(k, v)
    user.save()
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
import adagios.utils
import os
class FakeAdagiosEnvironment(unittest.TestCase):
    """ Test the features of adagios.utils.FakeAdagiosEnvironment
    """
    @classmethod
    def setUpClass(cls):
        # One shared fake environment for all tests in this class.
        cls.fake_adagios = adagios.utils.FakeAdagiosEnvironment()

    @classmethod
    def tearDownClass(cls):
        cls.fake_adagios.terminate()

    def testFakeAdagiosEnvironment(self):
        fake_adagios = self.fake_adagios
        # Make sure temporary environment gets created
        fake_adagios.create_minimal_environment()
        self.assertTrue(os.path.exists(fake_adagios.adagios_config_file))
        # Make sure adagios.settings is updated
        # NOTE(review): this module only imports adagios.utils; access to
        # adagios.settings relies on it having been imported elsewhere --
        # confirm.
        global_config_file = adagios.settings.adagios_configfile
        fake_adagios.update_adagios_global_variables()
        # Make sure adagios_config_file changed
        self.assertTrue(adagios.settings.adagios_configfile != global_config_file)
        # Make sure the new test is in the tempdir
        self.assertTrue(adagios.settings.adagios_configfile.startswith(fake_adagios.tempdir))
        # Make sure global variables are properly restored
        fake_adagios.restore_adagios_global_variables()
        self.assertTrue(adagios.settings.adagios_configfile == global_config_file)
class MiscTestCase(unittest.TestCase):
    """Smoke tests for the views in the /misc/ application.

    Every test runs against a throw-away fake adagios environment so the
    real configuration on disk is never touched.
    """

    def setUp(self):
        # Build a minimal temporary environment and point the adagios
        # global settings at it for the duration of the test.
        self.environment = adagios.utils.FakeAdagiosEnvironment()
        self.environment.create_minimal_environment()
        self.environment.update_adagios_global_variables()

    def tearDown(self):
        # Restores the global settings and removes the temporary files.
        self.environment.terminate()

    def _testPageLoad(self, url):
        """Load a single page and assert it returns HTTP 200."""
        c = Client()
        response = c.get(url)
        self.assertEqual(response.status_code, 200)

    def TestPageLoads(self):
        """ Smoke test views in /misc/
        """
        # NOTE(review): this method name does not start with "test", so the
        # unittest runner will not discover it automatically — confirm
        # whether that is intentional before renaming it.
        self.loadPage("/misc/settings")
        self.loadPage("/misc/preferences")
        self.loadPage("/misc/nagios")
        self.loadPage("/misc/settings")
        self.loadPage("/misc/service")
        self.loadPage("/misc/pnp4nagios")
        self.loadPage("/misc/mail")
        self.loadPage("/misc/images")

    def loadPage(self, url):
        """ Load one specific page, and assert if return code is not 200 """
        try:
            c = Client()
            response = c.get(url)
            self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
        except Exception as e:
            # BUG FIX: the old code did assertEqual(True, <message string>),
            # which always fails but with a confusing diff; fail explicitly
            # with the intended message instead.
            self.fail(_("Unhandled exception while loading %(url)s: %(e)s") % {'url': url, 'e': e})

    def test_user_preferences(self):
        """POSTing new preferences is reflected in the rendered response."""
        c = Client()
        response = c.post('/misc/preferences/',
                          {'theme': 'spacelab', 'language': 'fr'})
        assert (response.status_code == 200)
        assert ('spacelab/style.css' in response.content)
        assert ('(fr)' in response.content)

    def load_get(self, url):
        """GET a url with a fresh test client and return the raw response."""
        c = Client()
        response = c.get(url)
        return response

    def test_topmenu_highlight(self):
        # The active top menu entry carries the "active" css class.
        r = self.load_get('/status/')
        assert '<li class="active">\n <a href="/status">' in r.content

    def test_leftmenu_highlight(self):
        # The active left menu entry carries the "active" css class.
        r = self.load_get('/status/problems')
        assert '<li class="active">\n <a href="/status/problems">' in r.content

    def test_app_name(self):
        # TOPMENU_HOME is rendered into every page's top menu.
        from adagios import settings
        settings.TOPMENU_HOME = 'Free beer'
        r = self.load_get('/status')
        assert 'Free beer' in r.content
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings

# Url routes for the adagios.misc application.  The project-level urls.py
# mounts this module, so every pattern below is relative to that prefix.
urlpatterns = patterns('',
    # Sandbox / debugging views
    (r'^/test/?', 'adagios.misc.views.test'),
    (r'^/paste/?', 'adagios.misc.views.paste'),
    # Front page and configuration views
    (r'^/?$', 'adagios.misc.views.index'),
    (r'^/settings/?', 'adagios.misc.views.settings'),
    (r'^/preferences/?', 'adagios.misc.views.preferences'),
    (r'^/nagios/?', 'adagios.misc.views.nagios'),
    (r'^/iframe/?', 'adagios.misc.views.iframe'),
    (r'^/gitlog/?', 'adagios.misc.views.gitlog'),
    (r'^/service/?', 'adagios.misc.views.nagios_service'),
    # pnp4nagios integration
    (r'^/pnp4nagios/?$', 'adagios.misc.views.pnp4nagios'),
    (r'^/pnp4nagios/edit(?P<filename>.+)$', 'adagios.misc.views.pnp4nagios_edit_template'),
    (r'^/mail', 'adagios.misc.views.mail'),
    # Serve nagios logo images straight off the filesystem; the catch-all
    # /images/ url without a path renders the icon listing instead.
    url(r'^/images/(?P<path>.+)$', 'django.views.static.serve', {'document_root': '/usr/share/nagios3/htdocs/images/logos/'}, name="logo"),
    (r'^/images/?$', 'adagios.misc.views.icons'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.context_processors import csrf
from django.forms.formsets import BaseFormSet
from django.shortcuts import render_to_response
from django.shortcuts import render
from django.utils.translation import ugettext as _
from django.shortcuts import HttpResponse
from django.template import RequestContext
from adagios.misc import forms

import os
import mimetypes
import pynag.Model
import pynag.Utils
import pynag.Control
import pynag.Model.EventHandlers
import pynag.Utils
import os.path
from time import mktime, sleep
from datetime import datetime
from os.path import dirname
from subprocess import Popen, PIPE

import adagios.settings
import adagios.objectbrowser
from adagios import __version__
import adagios.status.utils
from adagios import userdata
from collections import defaultdict
from adagios.views import adagios_decorator, error_page

# Map nagios plugin exit codes (0/1/2) to human readable state names.
# Any other exit code falls back to "unknown".
state = defaultdict(lambda: "unknown")
state[0] = "ok"
state[1] = "warning"
state[2] = "critical"
@adagios_decorator
def index(request):
    """Render the adagios front page."""
    context = {
        'nagios_cfg': pynag.Model.config.cfg_file,
        'version': __version__,
    }
    return render_to_response('frontpage.html', context, context_instance=RequestContext(request))
@adagios_decorator
def settings(request):
    """View and edit adagios' own configuration file.

    GET renders the settings form; POST validates the submitted data and
    writes it back to the adagios configuration file.
    """
    c = {}
    c.update(csrf(request))
    c['messages'] = m = []
    c['errors'] = e = []
    if request.method == 'GET':
        form = forms.AdagiosSettingsForm(initial=request.GET)
        # Run validation on the initial data so the form is populated
        # before it is rendered.
        form.is_valid()
    elif request.method == 'POST':
        form = forms.AdagiosSettingsForm(data=request.POST)
        if form.is_valid():
            try:
                form.save()
                m.append(_("%s successfully saved.") % form.adagios_configfile)
            except IOError, exc:
                # Typically a permission problem writing the config file.
                e.append(exc)
    else:
        raise Exception(_("We only support methods GET or POST"))
    c['form'] = form
    return render_to_response('settings.html', c, context_instance=RequestContext(request))
@adagios_decorator
def nagios(request):
    """Show the native nagios web interface inside an iframe."""
    nagios_url = adagios.settings.nagios_url
    return iframe(request, nagios_url)
@adagios_decorator
def iframe(request, url=None):
    """Render an arbitrary url inside an iframe.

    Falls back to the ?url= query parameter when no url argument is given.
    Note: the template receives locals(), i.e. both ``request`` and ``url``.
    """
    url = url or request.GET.get('url', None)
    return render_to_response('iframe.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def gitlog(request):
    """ View that displays a nice log of previous git commits in dirname(config.cfg_file) """
    c = {}
    c.update(csrf(request))
    c['messages'] = m = []
    c['errors'] = []
    # Get information about the committer
    author_name = request.META.get('REMOTE_USER', 'anonymous')
    try:
        contact = pynag.Model.Contact.objects.get_by_shortname(author_name)
        author_email = contact.email or None
    except Exception:
        # No matching nagios contact (or lookup failed); commit without email.
        author_email = None
    nagiosdir = dirname(pynag.Model.config.cfg_file or None)
    git = pynag.Utils.GitRepo(
        directory=nagiosdir, author_name=author_name, author_email=author_email)
    c['nagiosdir'] = nagiosdir
    c['commits'] = []
    if request.method == 'POST':
        try:
            if 'git_init' in request.POST:
                # Initialize a new git repository in the nagios directory.
                git.init()
            elif 'git_commit' in request.POST:
                # Commit every file whose checkbox (named "commit_<file>")
                # was ticked in the submitted form.
                filelist = []
                commit_message = request.POST.get(
                    'git_commit_message', _("bulk commit by adagios"))
                for i in request.POST:
                    if i.startswith('commit_'):
                        filename = i[len('commit_'):]
                        git.add(filename)
                        filelist.append(filename)
                if len(filelist) == 0:
                    raise Exception(_("No files selected."))
                git.commit(message=commit_message, filelist=filelist)
                m.append(_("%s files successfully commited.") % len(filelist))
        except Exception, e:
            c['errors'].append(e)
    # Check if nagiosdir has a git repo or not
    try:
        c['uncommited_files'] = git.get_uncommited_files()
    except pynag.Model.EventHandlers.EventHandlerError, e:
        # git exits with 128 when the directory is not a repository.
        if e.errorcode == 128:
            c['no_git_repo_found'] = True
    # Show git history
    try:
        c['commits'] = git.log()
        commit = request.GET.get('show', False)
        if commit != False:
            # ?show=<sha> requested: render a colorized diff of that commit.
            c['diff'] = git.show(commit)
            difflines = []
            for i in c['diff'].splitlines():
                # Map each diff line prefix to a css class; lines that match
                # none of the prefixes are dropped from the display.
                if i.startswith('---'):
                    tag = 'hide'
                elif i.startswith('+++'):
                    tag = 'hide'
                elif i.startswith('index'):
                    tag = 'hide'
                elif i.startswith('-'):
                    tag = "alert-danger"
                elif i.startswith('+'):
                    tag = "alert-success"
                elif i.startswith('@@'):
                    tag = 'alert-unknown'
                elif i.startswith('diff'):
                    tag = "filename"
                else:
                    continue
                difflines.append({'tag': tag, 'line': i})
            c['difflines'] = difflines
            c['commit_id'] = commit
    except Exception, e:
        c['errors'].append(e)
    return render_to_response('gitlog.html', c, context_instance=RequestContext(request))
@adagios_decorator
def nagios_service(request):
    """ View to restart / reload nagios service """
    c = {}
    c['errors'] = []
    c['messages'] = []
    nagios_bin = adagios.settings.nagios_binary
    nagios_init = adagios.settings.nagios_init_script
    nagios_cfg = adagios.settings.nagios_config
    if request.method == 'GET':
        form = forms.NagiosServiceForm(initial=request.GET)
    else:
        # POST: run the requested service action (via the form's save()).
        form = forms.NagiosServiceForm(data=request.POST)
        if form.is_valid():
            form.save()
            c['stdout'] = form.stdout
            c['stderr'] = form.stderr
            c['command'] = form.command
            # Surface any "Error:" lines from the command output.
            for i in form.stdout.splitlines():
                if i.strip().startswith('Error:'):
                    c['errors'].append(i)
    c['form'] = form
    # Query the current status of the nagios daemon.
    service = pynag.Control.daemon(
        nagios_bin=nagios_bin, nagios_cfg=nagios_cfg, nagios_init=nagios_init)
    c['status'] = s = service.status()
    if s == 0:
        c['friendly_status'] = "running"
    elif s == 1:
        c['friendly_status'] = "not running"
    else:
        c['friendly_status'] = 'unknown (exit status %s)' % (s, )
    needs_reload = pynag.Model.config.needs_reload()
    c['needs_reload'] = needs_reload
    return render_to_response('nagios_service.html', c, context_instance=RequestContext(request))
@adagios_decorator
def pnp4nagios(request):
    """ View to handle integration with pnp4nagios

    Renders (and on POST saves) the broker module, action_url and
    npcd.cfg configuration forms.
    """
    c = {}
    c['errors'] = e = []
    c['messages'] = m = []
    c['broker_module'] = forms.PNPBrokerModuleForm(initial=request.GET)
    c['templates_form'] = forms.PNPTemplatesForm(initial=request.GET)
    c['action_url'] = forms.PNPActionUrlForm(initial=request.GET)
    c['pnp_templates'] = forms.PNPTemplatesForm(initial=request.GET)
    try:
        c['npcd_config'] = forms.PNPConfigForm(initial=request.GET)
    except Exception as error:
        # BUG FIX: this exception used to be caught as ``e``, rebinding the
        # name away from the errors list and breaking e.append() further
        # down whenever PNPConfigForm failed.
        c['errors'].append(error)
    #c['interesting_objects'] = form.interesting_objects
    if request.method == 'POST' and 'save_broker_module' in request.POST:
        c['broker_module'] = broker_form = forms.PNPBrokerModuleForm(
            data=request.POST)
        if broker_form.is_valid():
            broker_form.save()
            m.append(_("Broker Module updated in nagios.cfg"))
    elif request.method == 'POST' and 'save_action_url' in request.POST:
        c['action_url'] = forms.PNPActionUrlForm(data=request.POST)
        if c['action_url'].is_valid():
            c['action_url'].save()
            m.append(_('Action_url updated for %s services') %
                     c['action_url'].total_services)
            if c['action_url'].error_services > 0:
                e.append(
                    _("%s services could not be updated (check permissions?)") %
                    c['action_url'].error_services)
    elif request.method == 'POST' and 'save_npcd_config' in request.POST:
        c['npcd_config'] = forms.PNPConfigForm(data=request.POST)
        if c['npcd_config'].is_valid():
            c['npcd_config'].save()
            m.append(_("npcd.cfg updated"))
    return render_to_response('pnp4nagios.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit_file(request, filename):
    """ This view gives raw read/write access to a given filename.

    Please be so kind as not to give direct url access to this function, because it will allow
    Editing of any file the webserver has access to.
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    try:
        c['form'] = forms.EditFileForm(filename=filename, initial=request.GET)
        c['filename'] = filename
        if request.method == 'POST':
            # Re-bind the form with the submitted content and write it out.
            c['form'] = forms.EditFileForm(
                filename=filename, data=request.POST)
            if c['form'].is_valid():
                c['form'].save()
    except Exception, e:
        c['errors'].append(e)
    return render_to_response('editfile.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit_nagios_cfg(request):
    """Open the main nagios configuration file in the raw file editor."""
    config_file = adagios.settings.nagios_config
    return edit_file(request, filename=config_file)
@adagios_decorator
def pnp4nagios_edit_template(request, filename):
    """Raw editor for pnp4nagios templates.

    Raises when filename is not one of the known pnp4nagios templates,
    so arbitrary files cannot be edited through this url.
    """
    form = forms.PNPTemplatesForm(initial=request.GET)
    if filename not in form.templates:
        raise Exception(
            _("Security violation. You are not allowed to edit %s") % filename)
    return edit_file(request, filename=filename)
@adagios_decorator
def icons(request, image_name=None):
    """ Use this view to see nagios icons/logos

    Without image_name: render a listing of every available image.
    With image_name: serve that single image file.
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    image_path = '/usr/share/nagios3/htdocs/images/logos/'
    filenames = []
    for root, subfolders, files in os.walk(image_path):
        for filename in files:
            filenames.append(os.path.join(root, filename))
    # Cut image_path out of every filename
    filenames = map(lambda x: x[len(image_path):], filenames)
    # Filter out those silly .gd2 files that don't display inside a browser
    filenames = filter(lambda x: not x.lower().endswith('.gd2'), filenames)
    filenames.sort()
    if not image_name:
        # Return a list of images
        c['images'] = filenames
        return render_to_response('icons.html', c, context_instance=RequestContext(request))
    if image_name not in filenames:
        raise Exception(_("Not allowed to see this image"))
    # BUG FIX: mimetypes.types_map is keyed with the leading dot ('.png'),
    # so the old lookup of the bare extension always returned None.
    # guess_type() handles the extension correctly.
    mime_type = mimetypes.guess_type(image_name)[0]
    # Open in binary mode; images are not text.
    fsock = open("%s/%s" % (image_path, image_name,), 'rb')
    return HttpResponse(fsock, mimetype=mime_type)
@adagios_decorator
def mail(request):
    """ Send a notification email to one or more contacts regarding hosts or services """
    c = {}
    c['messages'] = []
    c['errors'] = []
    c.update(csrf(request))
    c['http_referer'] = request.META.get("HTTP_REFERER")
    c['http_origin'] = request.META.get("HTTP_ORIGIN")
    remote_user = request.META.get('REMOTE_USER', 'anonymous adagios user')
    hosts = []
    services = []
    if request.method == 'GET':
        c['form'] = forms.SendEmailForm(remote_user, initial=request.GET)
        # Hosts/services can be passed either as repeated "host"/"service"
        # parameters or in the jquery-style "host[]"/"service[]" form.
        hosts = request.GET.getlist('host') or request.GET.getlist('host[]')
        services = request.GET.getlist(
            'service') or request.GET.getlist('service[]')
        if not services and not hosts:
            # Nothing selected: default to the services of localhost.
            c['form'].services = adagios.status.utils.get_services(
                request, host_name='localhost')
    elif request.method == 'POST':
        c['form'] = forms.SendEmailForm(remote_user, data=request.POST)
        services = request.POST.getlist('service') or request.POST.getlist('service[]')
        hosts = request.POST.getlist('host') or request.POST.getlist('host[]')
        c['acknowledged_or_not'] = request.POST.get('acknowledge_all_problems') == 'true'
    # Resolve every requested host into a status object for the form.
    for host_name in hosts:
        host_object = adagios.status.utils.get_hosts(request, host_name=host_name)
        if not host_object:
            c['errors'].append(
                _("Host %s not found. Maybe a typo or you do not have access to it.") % host_name
            )
            continue
        for item in host_object:
            item['host_name'] = item['name']
            item['description'] = _("Host Status")
            c['form'].status_objects.append(item)
            c['form'].hosts.append(item)
    # Resolve every requested "host/service" pair the same way.
    for i in services:
        try:
            host_name, service_description = i.split('/', 1)
            service = adagios.status.utils.get_services(request,
                                                        host_name=host_name,
                                                        service_description=service_description
                                                        )
            if not service:
                c['errors'].append(
                    _('Service "%s"" not found. Maybe a typo or you do not have access to it ?') % i)
            for x in service:
                c['form'].status_objects.append(x)
                c['form'].services.append(x)
        except AttributeError, e:
            c['errors'].append(_("AttributeError for '%(i)s': %(e)s") % {'i': i, 'e': e})
        except KeyError, e:
            c['errors'].append(_("Error adding service '%(i)s': %(e)s") % {'i': i, 'e': e})
    c['services'] = c['form'].services
    c['hosts'] = c['form'].hosts
    c['status_objects'] = c['form'].status_objects
    # Pre-render the object list snippet; the form embeds it in the email.
    c['form'].html_content = render(
        request, "snippets/misc_mail_objectlist.html", c).content
    if request.method == 'POST' and c['form'].is_valid():
        # save() performs the actual mail delivery (and acknowledgements).
        c['form'].save()
    return render_to_response('misc_mail.html', c, context_instance=RequestContext(request))
@adagios_decorator
def test(request):
    """Sandbox view: feed plugin output through PluginOutputForm."""
    context = {'messages': []}
    context.update(csrf(request))
    # POST parses the submitted plugin output; GET just shows the form.
    if request.method == 'POST':
        form = forms.PluginOutputForm(data=request.POST)
        if form.is_valid():
            form.parse()
    else:
        form = forms.PluginOutputForm(initial=request.GET)
    context['form'] = form
    return render_to_response('test.html', context, context_instance=RequestContext(request))
@adagios_decorator
def paste(request):
    """Sandbox view: feed pasted data through PasteForm."""
    context = {'messages': []}
    context.update(csrf(request))
    # POST parses the submitted paste; GET just shows the form.
    if request.method == 'POST':
        form = forms.PasteForm(data=request.POST)
        if form.is_valid():
            form.parse()
    else:
        form = forms.PasteForm(initial=request.GET)
    context['form'] = form
    return render_to_response('test2.html', context, context_instance=RequestContext(request))
@adagios_decorator
def preferences(request):
    """View and save per-user preferences (theme, language, ...)."""
    c = {}
    c['messages'] = []
    c.update(csrf(request))
    user = userdata.User(request)
    if request.method == 'POST':
        c['form'] = forms.UserdataForm(data=request.POST)
        if c['form'].is_valid():
            # Copy every cleaned form value into the user's preferences.
            for k, v in c['form'].cleaned_data.iteritems():
                user.set_pref(k, v)
            user.save()  # will save in json and trigger the hooks
            c['messages'].append(_('Preferences have been saved.'))
    else:
        c['form'] = forms.UserdataForm(initial=user.to_dict())
    return render_to_response('userdata.html', c, context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = tests
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    """Placeholder test case (Django startapp boilerplate)."""

    def test_basic_addition(self):
        """Sanity check that the test runner itself works: 1 + 1 == 2."""
        self.assertEqual(2, 1 + 1)
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
# Example url routes for the sample "myapp" application.  All three
# patterns map to the same hello_world view.
urlpatterns = patterns('adagios',
    (r'^/?$', 'myapp.views.hello_world'),
    (r'^/url1?$', 'myapp.views.hello_world'),
    (r'^/url2?$', 'myapp.views.hello_world'),
)
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Create your views here.
from django.core.context_processors import csrf
from django.shortcuts import render_to_response
from django.shortcuts import HttpResponse
from django.shortcuts import RequestContext
def hello_world(request):
    """Example view: render the hello-world template with an empty context."""
    context = {}
    return render_to_response("myapp_helloworld.html", context, context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.utils.safestring import mark_safe
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from pynag import Model
from pynag.Utils import AttributeList
from adagios.objectbrowser.help_text import object_definitions
from pynag.Model import ObjectDefinition
from adagios.forms import AdagiosForm
import adagios.misc.rest
# These fields are special, they are a comma seperated list, and may or
# may not have +/- in front of them.
MULTICHOICE_FIELDS = ('servicegroups', 'hostgroups', 'contacts',
                      'contact_groups', 'contactgroups', 'use', 'notification_options')

# (value, label) choices for the notification_options attribute of services.
SERVICE_NOTIFICATION_OPTIONS = (
    ('w', 'warning'),
    ('c', 'critical'),
    ('r', 'recovery'),
    ('u', 'unreachable'),
    ('d', 'downtime'),
    ('f', 'flapping'),
)

# (value, label) choices for the notification_options attribute of hosts.
HOST_NOTIFICATION_OPTIONS = (
    ('d', 'down'),
    ('u', 'unreachable'),
    ('r', 'recovery'),
    ('f', 'flapping'),
    ('s', 'scheduled_downtime')
)

# Choices for nagios boolean attributes; '' means the attribute is not set.
BOOLEAN_CHOICES = (('', 'not set'), ('1', '1'), ('0', '0'))
class PynagChoiceField(forms.MultipleChoiceField):
    """ multichoicefields that accepts comma seperated input as values """

    def __init__(self, inline_help_text=_("Select some options"), *args, **kwargs):
        self.__prefix = ''
        self.data = kwargs.get('data')
        super(PynagChoiceField, self).__init__(*args, **kwargs)
        # Shown by the select widget while nothing is selected.
        self.widget.attrs['data-placeholder'] = inline_help_text

    def clean(self, value):
        """Collapse the selected list into a comma separated string.

        Duplicates are dropped (first occurrence wins) and the remembered
        +/- operator prefix is re-applied.  An empty selection yields the
        sentinel string "null".
        """
        if not value:
            return "null"
        deduplicated = []
        for item in value:
            if item not in deduplicated:
                deduplicated.append(item)
        return self.__prefix + ','.join(deduplicated)

    def prepare_value(self, value):
        """Split a comma separated attribute string into a list.

        Any leading +/- operator is stripped off and remembered so clean()
        can put it back.  Non-string values pass through untouched.
        """
        if isinstance(value, str):
            self.attributelist = AttributeList(value)
            self.__prefix = self.attributelist.operator
            return self.attributelist.fields
        return value
class PynagRadioWidget(forms.widgets.HiddenInput):
    """ Special Widget designed to make Nagios attributes with 0/1 values look like on/off buttons """

    def render(self, name, value, attrs=None):
        # Render the underlying hidden input first; the buttons below are
        # expected to drive its value client-side.
        output = super(PynagRadioWidget, self).render(name, value, attrs)
        one, zero, unset = "", "", ""
        # Exactly one of the three buttons gets the "active" css class,
        # depending on the current value ("1", "0" or anything else).
        if value == "1":
            one = "active"
        elif value == "0":
            zero = "active"
        else:
            unset = "active"
        prefix = """
        <div class="btn-group" data-toggle-name="%s" data-toggle="buttons-radio">
        <button type="button" value="1" class="btn btn %s">On</button>
        <button type="button" value="0" class="btn btn %s">Off</button>
        <button type="button" value="" class="btn %s">Not set</button>
        </div>
        """ % (name, one, zero, unset)
        output += prefix
        return mark_safe(output)
class PynagForm(AdagiosForm):
    """Dynamically built form for editing one pynag ObjectDefinition.

    A form field is generated for every attribute that is defined on the
    object, inherited from its templates, or merely documented for the
    object type; each field is tagged with a css class of "defined",
    "inherited" or "undefined" accordingly.
    """

    def clean(self):
        cleaned_data = super(PynagForm, self).clean()
        for k, v in cleaned_data.items():
            # change from unicode to str
            v = cleaned_data[k] = smart_str(v)
            # Empty string, or the string None, means remove the field
            if v in ('', 'None'):
                cleaned_data[k] = v = None
            # Maintain operator (+,-, !) for multichoice fields
            if k in MULTICHOICE_FIELDS and v and v != "null":
                operator = AttributeList(self.pynag_object.get(k, '')).operator or ''
                cleaned_data[k] = "%s%s" % (operator, v)
        return cleaned_data

    def save(self):
        """Write every genuinely changed field to the pynag object and save it."""
        changed_keys = map(lambda x: smart_str(x), self.changed_data)
        for k in changed_keys:
            # Ignore fields that did not appear in the POST at all EXCEPT
            # if it is a PynagChoiceField.  A multichoice field that does
            # not appear in the post means the user removed every attribute
            # in the multichoice field.
            if k not in self.data and not isinstance(self.fields.get(k, None), PynagChoiceField):
                continue
            value = self.cleaned_data[k]
            # Sometimes attributes slide in changed_data without having
            # been modified, lets ignore those
            if self.pynag_object[k] == value:
                continue
            # Multichoice fields have a special restriction, sometimes they contain
            # the same values as before but in a different order.
            if k in MULTICHOICE_FIELDS:
                original = AttributeList(self.pynag_object[k])
                new = AttributeList(value)
                if sorted(original.fields) == sorted(new.fields):
                    continue
            # If we reach here, it is safe to modify our pynag object.
            self.pynag_object[k] = value
            # Additionally, update the field for the return form
            self.fields[k] = self.get_pynagField(k, css_tag="defined")
            self.fields[k].value = value
        self.pynag_object.save()
        adagios.misc.rest.add_notification(message=_("Object successfully saved"), level="success", notification_type="show_once")

    def __init__(self, pynag_object, *args, **kwargs):
        self.pynag_object = pynag_object
        super(PynagForm, self).__init__(*args, **kwargs)
        # Lets find out what attributes to create
        object_type = pynag_object['object_type']
        defined_attributes = sorted(
            self.pynag_object._defined_attributes.keys())
        inherited_attributes = sorted(
            self.pynag_object._inherited_attributes.keys())
        all_attributes = sorted(object_definitions.get(object_type).keys())
        all_attributes += ['name', 'use', 'register']
        # Special hack for macros
        # If this is a post and any post data looks like a nagios macro
        # We will generate a field for it on the fly
        macros = filter(lambda x: x.startswith('$') and x.endswith('$'), self.data.keys())
        for field_name in macros:
            # if field_name.startswith('$ARG'):
            #     self.fields[field_name] = self.get_pynagField(field_name, css_tag='defined')
            if object_type == 'service' and field_name.startswith('$_SERVICE'):
                self.fields[field_name] = self.get_pynagField(
                    field_name, css_tag='defined')
            elif object_type == 'host' and field_name.startswith('$_HOST'):
                self.fields[field_name] = self.get_pynagField(
                    field_name, css_tag='defined')
        # Calculate what attributes are "undefined"
        self.undefined_attributes = []
        for i in all_attributes:
            if i in defined_attributes:
                continue
            if i in inherited_attributes:
                continue
            self.undefined_attributes.append(i)
        # Find out which attributes to show
        for field_name in defined_attributes:
            self.fields[field_name] = self.get_pynagField(
                field_name, css_tag='defined')
        for field_name in inherited_attributes:
            self.fields[field_name] = self.get_pynagField(
                field_name, css_tag="inherited")
        for field_name in self.undefined_attributes:
            self.fields[field_name] = self.get_pynagField(
                field_name, css_tag='undefined')
        return

    def get_pynagField(self, field_name, css_tag="", required=None):
        """ Takes a given field_name and returns a forms.Field that is appropriate for this field

        Arguments:
          field_name -- Name of the field to add, example "host_name"
          css_tag    -- String will make its way as a css attribute in the resulting html
          required   -- If True, make field required. If None, let pynag decide
        """
        # Lets figure out what type of field this is, default to charfield
        object_type = self.pynag_object['object_type']
        definitions = object_definitions.get(object_type) or {}
        options = definitions.get(field_name) or {}
        # Find out what type of field to create from the field_name.
        # Lets assume charfield in the beginning
        field = forms.CharField()
        # BUG FIX: the dispatch chain used to start with a dead
        # "if False is True: pass" branch; start with the first real test.
        if field_name in ('contact_groups', 'contactgroups', 'contactgroup_members'):
            all_groups = Model.Contactgroup.objects.filter(
                contactgroup_name__contains="")
            choices = sorted(
                map(lambda x: (x.contactgroup_name, x.contactgroup_name), all_groups))
            field = PynagChoiceField(
                choices=choices, inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name == 'use':
            all_objects = self.pynag_object.objects.filter(name__contains='')
            choices = map(lambda x: (x.name, x.name), all_objects)
            # BUG FIX: this branch used _("No %s selected") with a dict,
            # which rendered the dict literally; use the named placeholder
            # like every other branch.
            field = PynagChoiceField(
                choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name in ('servicegroups', 'servicegroup_members'):
            all_groups = Model.Servicegroup.objects.filter(
                servicegroup_name__contains='')
            choices = map(
                lambda x: (x.servicegroup_name, x.servicegroup_name), all_groups)
            field = PynagChoiceField(
                choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name in ('hostgroups', 'hostgroup_members', 'hostgroup_name') and object_type != 'hostgroup':
            all_groups = Model.Hostgroup.objects.filter(
                hostgroup_name__contains='')
            choices = map(
                lambda x: (x.hostgroup_name, x.hostgroup_name), all_groups)
            field = PynagChoiceField(
                choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name == 'members' and object_type == 'hostgroup':
            all_groups = Model.Host.objects.filter(host_name__contains='')
            choices = map(lambda x: (x.host_name, x.host_name), all_groups)
            field = PynagChoiceField(
                choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name == 'host_name' and object_type == 'service':
            all_groups = Model.Host.objects.filter(host_name__contains='')
            choices = map(lambda x: (x.host_name, x.host_name), all_groups)
            field = PynagChoiceField(
                choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name in ('contacts', 'members'):
            all_objects = Model.Contact.objects.filter(
                contact_name__contains='')
            choices = map(
                lambda x: (x.contact_name, x.contact_name), all_objects)
            # BUG FIX: same "%s"-with-a-dict mixup as the 'use' branch above.
            field = PynagChoiceField(
                choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name.endswith('_period'):
            all_objects = Model.Timeperiod.objects.filter(
                timeperiod_name__contains='')
            choices = [('', '')] + map(
                lambda x: (x.timeperiod_name, x.timeperiod_name), all_objects)
            field = forms.ChoiceField(choices=sorted(choices))
        elif field_name.endswith('notification_commands'):
            all_objects = Model.Command.objects.filter(
                command_name__contains='')
            choices = [('', '')] + map(
                lambda x: (x.command_name, x.command_name), all_objects)
            field = PynagChoiceField(choices=sorted(choices))
        # elif field_name == 'check_command':
        #     all_objects = Model.Command.objects.all
        #     choices = [('','')] + map(lambda x: (x.command_name, x.command_name), all_objects)
        #     field = forms.ChoiceField(choices=sorted(choices))
        elif field_name.endswith('notification_options') and self.pynag_object.object_type == 'host':
            field = PynagChoiceField(
                choices=HOST_NOTIFICATION_OPTIONS, inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif field_name.endswith('notification_options') and self.pynag_object.object_type == 'service':
            field = PynagChoiceField(
                choices=SERVICE_NOTIFICATION_OPTIONS, inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
        elif options.get('value') == '[0/1]':
            field = forms.CharField(widget=PynagRadioWidget)
        # Lets see if there is any help text available for our field
        if field_name in object_definitions[object_type]:
            help_text = object_definitions[object_type][field_name].get(
                'help_text', _("No help available for this item"))
            field.help_text = help_text
        # No prettyprint for macros
        if field_name.startswith('_'):
            field.label = field_name
        # If any CSS tag was given, add it to the widget
        self.add_css_tag(field=field, css_tag=css_tag)
        if 'required' in options:
            self.add_css_tag(field=field, css_tag=options['required'])
            field.required = options['required'] == 'required'
        else:
            field.required = False
        # At the moment, our database of required objects is incorrect
        # So if caller did not specify if field is required, we will not
        # make it required
        if required is None:
            field.required = False
        else:
            field.required = required
        # Put inherited value in the placeholder
        inherited_value = self.pynag_object._inherited_attributes.get(
            field_name)
        if inherited_value is not None:
            self.add_placeholder(
                field, _('%(inherited_value)s (inherited from template)') % {'inherited_value': inherited_value})
        if field_name in MULTICHOICE_FIELDS:
            self.add_css_tag(field=field, css_tag="multichoice")
        return field

    def add_css_tag(self, field, css_tag):
        """ Add a CSS tag to the widget of a specific field """
        if not 'class' in field.widget.attrs:
            field.widget.attrs['class'] = ''
            field.css_tag = ''
        field.widget.attrs['class'] += " " + css_tag
        field.css_tag += " " + css_tag

    def add_placeholder(self, field, placeholder=_("Insert some value here")):
        """ Set a html placeholder attribute (and .placeholder) on the field. """
        field.widget.attrs['placeholder'] = placeholder
        field.placeholder = placeholder
class AdvancedEditForm(AdagiosForm):
    """ A form for pynag.Model.Objectdefinition

    This form will display a charfield for every attribute of the objectdefinition

    "Every" attribute means:
    * Every defined attribute
    * Every inherited attribute
    * Every attribute that is defined in nagios object definition html
    """
    register = forms.CharField(
        required=False, help_text=_("Set to 1 if you want this object enabled."))
    name = forms.CharField(required=False, label=_("Generic Name"),
                           help_text=_("This name is used if you want other objects to inherit (with the use attribute) what you have defined here."))
    use = forms.CharField(required=False, label=_("Use"),
                          help_text=_("Inherit all settings from another object"))
    __prefix = "advanced"  # This prefix will go on every field

    def save(self):
        """Write every changed form field back to the pynag object and save.

        Unchanged values are skipped; values cleared to an empty string are
        removed from the object definition (set to None).
        """
        for k in self.changed_data:
            # change from unicode to str
            value = smart_str(self.cleaned_data[k])
            # same as original, lets ignore that
            if self.pynag_object[k] == value:
                continue
            if value == '':
                value = None
            # If we reach here, it is safe to modify our pynag object.
            self.pynag_object[k] = value
        self.pynag_object.save()

    def clean(self):
        """Coerce every cleaned value from unicode to str (pynag expects str)."""
        cleaned_data = super(AdvancedEditForm, self).clean()
        for k, v in cleaned_data.items():
            # change from unicode to str
            cleaned_data[k] = smart_str(v)
        return cleaned_data

    def __init__(self, pynag_object, *args, **kwargs):
        """Build one optional CharField per attribute of *pynag_object*.

        Attributes are the union of the object's currently defined keys and
        every attribute known for its object type.
        """
        self.pynag_object = pynag_object
        super(AdvancedEditForm, self).__init__(
            *args, prefix=self.__prefix, **kwargs)
        # Lets find out what attributes to create
        object_type = pynag_object['object_type']
        all_attributes = sorted(object_definitions.get(object_type).keys())
        # NOTE(review): keys() + list is Python 2 style (keys() returns a
        # list there). Duplicate names are harmless because assignment into
        # self.fields simply overwrites.
        for field_name in self.pynag_object.keys() + all_attributes:
            if field_name == 'meta':
                continue
            help_text = ""
            if field_name in object_definitions[object_type]:
                help_text = object_definitions[object_type][field_name].get(
                    'help_text', _("No help available for this item"))
            self.fields[field_name] = forms.CharField(
                required=False, label=field_name, help_text=help_text)
        # keyOrder is the pre-Django-1.7 mechanism for field ordering.
        self.fields.keyOrder = sorted(self.fields.keys())
class GeekEditObjectForm(AdagiosForm):
    """ Raw text editor for a single pynag object definition. """
    definition = forms.CharField(
        widget=forms.Textarea(attrs={'wrap': 'off', 'cols': '80'}))

    def __init__(self, pynag_object=None, *args, **kwargs):
        self.pynag_object = pynag_object
        super(GeekEditObjectForm, self).__init__(*args, **kwargs)

    def clean_definition(self, value=None):
        """Normalise CR/CRLF line endings to LF and guarantee a trailing newline."""
        raw = smart_str(self.cleaned_data['definition'])
        normalised = raw.replace('\r\n', '\n').replace('\r', '\n')
        if not normalised.endswith('\n'):
            normalised = normalised + '\n'
        return normalised

    def save(self):
        """Overwrite the object's definition in its configuration file."""
        new_definition = self.cleaned_data['definition']
        self.pynag_object.rewrite(str_new_definition=new_definition)
class DeleteObjectForm(AdagiosForm):
    """ Form used to handle deletion of one single object """

    def __init__(self, pynag_object, *args, **kwargs):
        self.pynag_object = pynag_object
        super(DeleteObjectForm, self).__init__(*args, **kwargs)
        # Hosts get an extra checkbox so their services can be removed too.
        if self.pynag_object.object_type == 'host':
            self.fields['recursive'] = forms.BooleanField(
                required=False, initial=True, label=_("Delete Services"),
                help_text=_("Check this box if you also want to delete all services of this host"))

    def delete(self):
        """ Deletes self.pynag_object. """
        recursive = self.cleaned_data.get('recursive', False) is True
        self.pynag_object.delete(recursive)
class CopyObjectForm(AdagiosForm):
    """ Form to assist a user to copy a single object definition """

    def __init__(self, pynag_object, *args, **kwargs):
        """Create input fields appropriate for copying *pynag_object*.

        Templates get a 'name' field; hosts get host_name/address plus a
        recursive checkbox; services get host_name/service_description;
        every other type gets a single <object_type>_name field.
        """
        self.pynag_object = pynag_object
        super(CopyObjectForm, self).__init__(*args, **kwargs)
        object_type = pynag_object['object_type']
        # For templates we assume the new copy will have its generic name changed
        # otherwise we display different field depending on what type of an
        # object it is
        if pynag_object['register'] == '0':
            if pynag_object.name is None:
                new_generic_name = "%s-copy" % pynag_object.get_description()
            else:
                new_generic_name = '%s-copy' % pynag_object.name
            self.fields['name'] = forms.CharField(
                initial=new_generic_name, help_text=_("Select a new generic name for this %(object_type)s") % {'object_type': object_type})
        elif object_type == 'host':
            new_host_name = "%s-copy" % pynag_object.get_description()
            self.fields['host_name'] = forms.CharField(
                help_text=_("Select a new host name for this host"), initial=new_host_name)
            self.fields['address'] = forms.CharField(
                help_text=_("Select a new ip address for this host"))
            self.fields['recursive'] = forms.BooleanField(
                required=False, label="Copy Services", help_text=_("Check this box if you also want to copy all services of this host."))
        elif object_type == 'service':
            service_description = "%s-copy" % pynag_object.service_description
            self.fields['host_name'] = forms.CharField(
                help_text=_("Select a new host name for this service"), initial=pynag_object.host_name)
            self.fields['service_description'] = forms.CharField(
                help_text=_("Select new service description for this service"), initial=service_description)
        else:
            field_name = "%s_name" % object_type
            initial = "%s-copy" % pynag_object[field_name]
            help_text = object_definitions[
                object_type][field_name].get('help_text')
            if help_text == '':
                help_text = _("Please specify a new %(field_name)s") % {'field_name': field_name}
            self.fields[field_name] = forms.CharField(
                initial=initial, help_text=help_text)

    def save(self):
        """Copy the object, using the cleaned form data as attribute overrides.

        The resulting object(s) are stored in self.copied_objects (always a
        list).
        """
        # If copy() returns a single object, lets transform it into a list
        tmp = self.pynag_object.copy(**self.cleaned_data)
        if not type(tmp) == type([]):
            tmp = [tmp]
        self.copied_objects = tmp

    def _clean_shortname(self):
        """ Make sure shortname of a particular object does not exist.

        Raise validation error if shortname is found
        """
        object_type = self.pynag_object.object_type
        field_name = "%s_name" % object_type
        value = smart_str(self.cleaned_data[field_name])
        try:
            # Success path: get_by_shortname raises KeyError when the name
            # is free; a hit means the name is already taken.
            self.pynag_object.objects.get_by_shortname(value)
            raise forms.ValidationError(
                _("A %(object_type)s with %(field_name)s='%(value)s' already exists.") % {'object_type': object_type,
                                                                                          'field_name': field_name,
                                                                                          'value': value,
                                                                                          })
        except KeyError:
            return value

    def clean_host_name(self):
        # Services may be copied onto an existing host, so skip the
        # shortname-uniqueness check for them.
        if self.pynag_object.object_type == 'service':
            return smart_str(self.cleaned_data['host_name'])
        return self._clean_shortname()

    def clean_timeperiod_name(self):
        return self._clean_shortname()

    def clean_command_name(self):
        return self._clean_shortname()

    def clean_contactgroup_name(self):
        return self._clean_shortname()

    def clean_hostgroup_name(self):
        return self._clean_shortname()

    def clean_servicegroup_name(self):
        return self._clean_shortname()

    def clean_contact_name(self):
        return self._clean_shortname()
class BaseBulkForm(AdagiosForm):
    """ To make changes to multiple objects at once

    * any POST data that has the name change_<OBJECTID> will be fetched
    and the ObjectDefinition saved in self.changed_objects
    * any POST data that has the name hidden_<OBJECTID> will be fetched
    and the ObjectDefinition saved in self.all_objects
    """

    def __init__(self, objects=None, *args, **kwargs):
        # all_objects: every object referenced in the POST data.
        # changed_objects: the subset the user actually ticked for change.
        self.objects = []
        self.all_objects = []
        self.changed_objects = []
        if not objects:
            objects = []
        # NOTE(review): calls forms.Form.__init__ directly, bypassing any
        # AdagiosForm.__init__ logic -- presumably deliberate, confirm
        # before changing.
        forms.Form.__init__(self, *args, **kwargs)
        for k, v in self.data.items():
            if k.startswith('hidden_'):
                obj = Model.ObjectDefinition.objects.get_by_id(v)
                if obj not in self.all_objects:
                    self.all_objects.append(obj)
            if k.startswith('change_'):
                # The object id is encoded in the field name itself.
                object_id = k[len("change_"):]
                obj = Model.ObjectDefinition.objects.get_by_id(object_id)
                if obj not in self.changed_objects:
                    self.changed_objects.append(obj)
                if obj not in self.all_objects:
                    self.all_objects.append(obj)

    def clean(self):
        #self.cleaned_data = {}
        # Copy the dynamic hidden_/change_ entries into cleaned_data and
        # (re)build the object lists, mirroring __init__.
        for k, v in self.data.items():
            if k.startswith('hidden_'):
                self.cleaned_data[k] = v
                obj = Model.ObjectDefinition.objects.get_by_id(v)
                if obj not in self.all_objects:
                    self.all_objects.append(obj)
            if k.startswith('change_'):
                self.cleaned_data[k] = v
                object_id = k[len("change_"):]
                obj = Model.ObjectDefinition.objects.get_by_id(object_id)
                if obj not in self.changed_objects:
                    self.changed_objects.append(obj)
                if obj not in self.all_objects:
                    self.all_objects.append(obj)
        # Coerce every cleaned value from unicode to str (pynag expects str).
        # NOTE(review): mutates the dict while iterating items() -- relies on
        # Python 2 items() returning a list; only values are replaced, no
        # keys added, so this is safe there.
        for k, v in self.cleaned_data.items():
            self.cleaned_data[k] = smart_str(self.cleaned_data[k])
        return self.cleaned_data
class BulkEditForm(BaseBulkForm):
    """ Set one attribute to one value on every selected object. """
    attribute_name = forms.CharField()
    new_value = forms.CharField()

    def save(self):
        attribute = self.cleaned_data['attribute_name']
        new_value = self.cleaned_data['new_value']
        for obj in self.changed_objects:
            obj[attribute] = new_value
            obj.save()
class BulkCopyForm(BaseBulkForm):
    """ Copy every selected object, overriding one attribute in each copy. """
    attribute_name = forms.CharField()
    new_value = forms.CharField()

    def __init__(self, *args, **kwargs):
        BaseBulkForm.__init__(self, *args, **kwargs)
        # NOTE(review): .value is not a standard django field attribute;
        # this looks like leftover development code but is preserved as-is.
        self.fields['attribute_name'].value = "test 2"
        # Lets take a look at the first item to be copied and suggest a field
        # name to change

    def save(self):
        attribute = self.cleaned_data['attribute_name']
        new_value = self.cleaned_data['new_value']
        for obj in self.changed_objects:
            obj.copy(**{attribute: new_value})
class BulkDeleteForm(BaseBulkForm):
    """ Form used to delete multiple objects at once """
    yes_i_am_sure = forms.BooleanField(label=_("Yes, I am sure"))

    def delete(self):
        """ Deletes every object in the form """
        for obj in self.changed_objects:
            # Deleting a host also removes its services.
            obj.delete(recursive=(obj.object_type == 'host'))
class CheckCommandForm(PynagForm):
    """Form exposing only host_name, service_description and check_command
    for a new pynag Service object."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): super() is deliberately anchored at AdagiosForm,
        # not CheckCommandForm, which skips PynagForm.__init__ (the same
        # pattern appears in AddObjectForm) -- confirm before changing.
        super(AdagiosForm, self).__init__(*args, **kwargs)
        self.pynag_object = Model.Service()
        self.fields['host_name'] = self.get_pynagField('host_name')
        self.fields['service_description'] = self.get_pynagField(
            'service_description')
        self.fields['check_command'] = self.get_pynagField('check_command')
# One (value, label) choice per known nagios object type, e.g. ('host', 'host').
choices_for_all_types = sorted([(x, x) for x in Model.string_to_class.keys()])
class AddTemplateForm(PynagForm):
    """ Use this form to add one template """
    object_type = forms.ChoiceField(choices=choices_for_all_types)
    name = forms.CharField(max_length=100)

    def __init__(self, *args, **kwargs):
        super(PynagForm, self).__init__(*args, **kwargs)

    def clean(self):
        """Validate object_type/name and prepare an unregistered pynag object."""
        cleaned_data = super(AddTemplateForm, self).clean()
        if "object_type" not in cleaned_data:
            raise forms.ValidationError(_('Object type is required'))
        object_type = cleaned_data['object_type']
        name = cleaned_data['name']
        if object_type not in Model.string_to_class:
            raise forms.ValidationError(
                _("We dont know nothing about how to add a '%(object_type)s'") % {'object_type': object_type})
        objectdefinition = Model.string_to_class.get(object_type)
        # A template with this name must not already exist; get_by_name
        # raising KeyError is the success path.
        try:
            objectdefinition.objects.get_by_name(name)
        except KeyError:
            pass
        else:
            raise forms.ValidationError(
                _("A %(object_type)s with name='%(name)s' already exists.") % {'object_type': object_type,
                                                                               'name': name,
                                                                               })
        self.pynag_object = objectdefinition()
        self.pynag_object['register'] = "0"
        return cleaned_data
class AddObjectForm(PynagForm):
    """Form used to create one new object definition of a given type.

    Hosts, services and contacts are suggested a generic-* template via a
    'use' field plus their key identifying fields; every other type gets a
    single <object_type>_name field.
    """

    def __init__(self, object_type, initial=None, *args, **kwargs):
        self.pynag_object = Model.string_to_class.get(object_type)()
        # NOTE(review): super() is anchored at AdagiosForm on purpose,
        # skipping PynagForm.__init__ -- confirm before changing.
        super(AdagiosForm, self).__init__(*args, **kwargs)
        # Some object types we will suggest a template:
        if object_type in ('host', 'contact', 'service'):
            self.fields['use'] = self.get_pynagField('use')
            self.fields['use'].initial = str('generic-%s' % object_type)
            self.fields['use'].help_text = _("Inherit attributes from this template")
        if object_type == 'host':
            self.fields['host_name'] = self.get_pynagField('host_name', required=True)
            self.fields['address'] = self.get_pynagField('address', required=True)
            self.fields['alias'] = self.get_pynagField('alias', required=False)
        elif object_type == 'service':
            self.fields['service_description'] = self.get_pynagField('service_description', required=True)
            self.fields['host_name'] = self.get_pynagField('host_name', required=False)
            self.fields['host_name'].help_text = _('Tell us which host this service check will be applied to')
            self.fields['hostgroup_name'] = self.get_pynagField('hostgroup_name', required=False)
            self.fields['hostgroup_name'].help_text = _("If you specify any hostgroups, this service will be applied to all hosts in that hostgroup")
        else:
            field_name = "%s_name" % object_type
            self.fields[field_name] = self.get_pynagField(
                field_name, required=True)
        # For some reason calling super()__init__() with initial as a parameter
        # will not work on PynagChoiceFields. This forces initial value to be set:
        initial = initial or {}
        for field_name, field in self.fields.items():
            initial_value = initial.get(field_name, None)
            if initial_value:
                field.initial = str(initial_value)

    def clean(self):
        """Require at least one of host_name / hostgroup_name for services."""
        cleaned_data = super(AddObjectForm, self).clean()
        if self.pynag_object.object_type == 'service':
            host_name = cleaned_data.get('host_name')
            hostgroup_name = cleaned_data.get('hostgroup_name')
            if host_name in (None, 'None', '') and hostgroup_name in (None, 'None', ''):
                raise forms.ValidationError(_("Please specify either hostgroup_name or host_name"))
        return cleaned_data

    def clean_timeperiod_name(self):
        return self._clean_shortname()

    def clean_command_name(self):
        return self._clean_shortname()

    def clean_contactgroup_name(self):
        return self._clean_shortname()

    def clean_servicegroup_name(self):
        return self._clean_shortname()

    def clean_contact_name(self):
        return self._clean_shortname()

    def clean_host_name(self):
        """For services every listed host must exist; otherwise do the
        shortname-uniqueness check."""
        if self.pynag_object.object_type == 'service':
            value = self.cleaned_data['host_name']
            if not value or value == 'null':
                return None
            hosts = value.split(',')
            for i in hosts:
                existing_hosts = Model.Host.objects.filter(host_name=i)
                if not existing_hosts:
                    raise forms.ValidationError(
                        _("Could not find host called '%(i)s'") % {'i': i})
            return smart_str(self.cleaned_data['host_name'])
        return self._clean_shortname()

    def clean_hostgroup_name(self):
        """For services every listed hostgroup must exist; otherwise do the
        shortname-uniqueness check."""
        if self.pynag_object.object_type == 'service':
            value = self.cleaned_data['hostgroup_name']
            if value in (None, '', 'null'):
                return None
            groups = value.split(',')
            for i in groups:
                existing_hostgroups = Model.Hostgroup.objects.filter(hostgroup_name=i)
                if not existing_hostgroups:
                    raise forms.ValidationError(
                        _("Could not find hostgroup called '%(i)s'") % {'i': i})
            return smart_str(self.cleaned_data['hostgroup_name'])
        return self._clean_shortname()

    def _clean_shortname(self):
        """ Make sure shortname of a particular object does not exist.

        Raise validation error if shortname is found
        """
        object_type = self.pynag_object.object_type
        field_name = "%s_name" % object_type
        value = smart_str(self.cleaned_data[field_name])
        try:
            # Success path: get_by_shortname raises KeyError when the name
            # is free; a hit means the name is already taken.
            self.pynag_object.objects.get_by_shortname(value)
            raise forms.ValidationError(
                _("A %(object_type)s with %(field_name)s='%(value)s' already exists.") % {'object_type': object_type,
                                                                                          'field_name': field_name,
                                                                                          'value': value,
                                                                                          })
        except KeyError:
            return value
########NEW FILE########
__FILENAME__ = help_text
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" objectbrowser/all_attributes.py
This is an extends of pynag's all_attributes with friendly help message for all attributes.
"""
from pynag.Model.all_attributes import object_definitions
from django.utils.translation import ugettext as _
# Friendly help texts, grouped per object type. The original file carried
# one assignment statement per attribute; keeping the data in dicts removes
# ~150 repetitive lines while producing the exact same entries in
# object_definitions.
_HELP_TEXTS = {
    "any": {
        "use": _("Specifies which object to inherit settings from"),
        "register": _("Specifies if object is active (registered) or not"),
        "name": _("Generic name of this objects. Only used for templates."),
    },
    "host": {
        "host_name": _("e.g. web01.example.com"),
        "alias": _("e.g. My Monitored Host"),
        "address": _("e.g. 127.0.0.1"),
        "parents": _("Network parents of this host. No notification will be sent if parent is down."),
        "hostgroups": _("Which hostgroups this host belongs to"),
        "check_command": _("Command to execute when this object is checked"),
        "initial_state": _('By default Nagios will assume that all hosts are in UP states when it starts. You can override the initial state for a host by using this directive. Valid options are: o = UP, d = DOWN, and u = UNREACHABLE.'),
        "max_check_attempts": _("How many failures do occur before notifications will be sent"),
        "check_interval": _("How many minutes to wait between checks"),
        "retry_interval": _("How many minutes to wait between checks when object goes to warning or critical state"),
        "active_checks_enabled": _("Whether Nagios actively checks this host"),
        "passive_checks_enabled": _("Whether Nagios passively accepts check results from an external source"),
        "check_period": _("When nagios checks for this host"),
        "obsess_over_host": _('This directive determines whether or not checks for the host will be "obsessed" over using the ochp_command.'),
    },
    "hostgroup": {
        "hostgroup_name": _("Unique name for this hostgroup (e.g. webservers)"),
        "alias": _("Human friendly name (e.g. My Web Servers)"),
        "members": _("List of hosts that belong to this group"),
        "hostgroup_members": _("List of hostgroups that belong to this group"),
        "notes": _("You can put your custom notes here for your hostgroup"),
        "notes_url": _("Type in an url for example to a documentation site for this hostgroup"),
    },
    "service": {
        "host_name": _("e.g. web01.example.com"),
        "hostgroup_name": _("Hostgroup this service belongs to"),
        "service_description": _("e.g. 'Disk Status'"),
        "servicegroups": _("Servicegroups that this service belongs to"),
        "check_command": _("Command that is executed when this service is checked"),
        "max_check_attempts": _("How many times to try before failure notifications are sent out"),
        "check_interval": _("How many minutes to wait between checks"),
        "retry_interval": _("How many minutes to wait between checks when failure occurs"),
        "active_checks_enabled": _("Enable if you want nagios to actively check this service"),
        "passive_checks_enabled": _("Enable if you want nagios to passively accept check results from an external source"),
        "check_period": _("Period which this service is checked."),
        "notification_period": _("Period which notifications are sent out for this service"),
        "contacts": _("Which contacts to notify if service fails"),
        "contact_groups": _("Which contactgroups to send notifications to if service fails"),
    },
    "servicegroup": {
        "servicegroup_name": _("Unique name for this service group"),
        "alias": _("Human friendly name for this servicegroup"),
        "members": _("List of services that belong to this group (Example: localhost,CPU Utilization,localhost,Disk Usage)"),
        "servicegroup_members": _("Servicegroups that are members of this servicegroup"),
        "notes": _("Arbitrary notes or description of this servicegroup"),
        "notes_url": _("Arbitrary url to a site of your choice"),
        "action_url": _("Arbitrary url to a site of your choice"),
    },
    "contact": {
        "contact_name": _("Unique name for this contact (e.g. username@domain.com)"),
        "alias": _("Human Friendly Name for this contact (e.g. Full Name)"),
        "contactgroups": _("List of groups that this contact is a member of."),
        "host_notifications_enabled": _("If this contact will receive host notifications."),
        "service_notifications_enabled": _("If this contact will receive service notifications."),
        "host_notification_period": _("When will this contact receive host notifications"),
        "service_notification_period": _("When will this contact receive service notifications"),
        "host_notification_options": _("Which host notifications this contact will receive"),
        "service_notification_options": _("Which service notifications this contact will receive"),
        "host_notification_commands": _("What command will be used to send host notifications to this contact"),
        "service_notification_commands": _("What command will be used to send service notifications to this contact"),
        "email": _("E-mail address of this contact"),
        "pager": _("Pager number of this contact"),
        "address": _("Address of this contact"),
        "can_submit_commands": _("If this contact is able to submit commands to nagios command pipe"),
    },
    "contactgroup": {
        "contactgroup_name": _("Unique name for this contact group (e.g. 'webservers')"),
        "alias": _("Human Friendly Name (e.g. 'My Web Servers')"),
        "members": _("Every Contact listed here will be a member of this contactgroup"),
        "contactgroup_members": _("Every Contactgroup listed here will be a member of this contactgroup"),
    },
    "timeperiod": {
        "timeperiod_name": _("Unique name for this timeperiod (.e.g. 'workhours')"),
        "alias": _("Human Friendly name for this timeperiod"),
    },
    "command": {
        "command_name": _("Unique name for this command"),
        "command_line": _("Command line of the command that will be executed"),
    },
}

# Attributes that have no friendly description yet. They still get a (blank)
# help_text entry so templates can rely on the key being present.
_BLANK_HELP = _(" ")
_UNDOCUMENTED = {
    "host": (
        "display_name", "check_freshness", "freshness_threshold",
        "event_handler", "event_handler_enabled", "low_flap_threshold",
        "high_flap_threshold", "flap_detection_enabled",
        "flap_detection_options", "process_perf_data",
        "retain_status_information", "retain_nonstatus_information",
        "contacts", "contact_groups", "notification_interval",
        "first_notification_delay", "notification_period",
        "notification_options", "notifications_enabled", "stalking_options",
        "notes", "notes_url", "action_url", "icon_image", "icon_image_alt",
        "vrml_image", "statusmap_image", "2d_coords", "3d_coords",
    ),
    "hostgroup": ("action_url",),
    "service": (
        "display_name", "is_volatile", "initial_state", "obsess_over_service",
        "check_freshness", "freshness_threshold", "event_handler",
        "event_handler_enabled", "low_flap_threshold", "high_flap_threshold",
        "flap_detection_enabled", "flap_detection_options",
        "process_perf_data", "retain_status_information",
        "retain_nonstatus_information", "notification_interval",
        "first_notification_delay", "notification_options",
        "notifications_enabled", "stalking_options", "notes", "notes_url",
        "action_url", "icon_image", "icon_image_alt",
    ),
    "contact": ("retain_status_information", "retain_nonstatus_information"),
    "timeperiod": ("[weekday]", "[exception]", "exclude"),
    "servicedependency": (
        "dependent_host_name", "dependent_hostgroup_name",
        "dependent_service_description", "host_name", "hostgroup_name",
        "service_description", "inherits_parent",
        "execution_failure_criteria", "notification_failure_criteria",
        "dependency_period",
    ),
    "serviceescalation": (
        "host_name", "hostgroup_name", "service_description", "contacts",
        "contact_groups", "first_notification", "last_notification",
        "notification_interval", "escalation_period", "escalation_options",
    ),
    "hostdependency": (
        "dependent_host_name", "dependent_hostgroup_name", "host_name",
        "hostgroup_name", "inherits_parent", "execution_failure_criteria",
        "notification_failure_criteria", "dependency_period",
    ),
    "hostescalation": (
        "host_name", "hostgroup_name", "contacts", "contact_groups",
        "first_notification", "last_notification", "notification_interval",
        "escalation_period", "escalation_options",
    ),
    "hostextinfo": (
        "host_name", "notes", "notes_url", "action_url", "icon_image",
        "icon_image_alt", "vrml_image", "statusmap_image", "2d_coords",
        "3d_coords",
    ),
    "serviceextinfo": (
        "host_name", "service_description", "notes", "notes_url",
        "action_url", "icon_image", "icon_image_alt",
    ),
}

for _object_type, _attributes in _HELP_TEXTS.items():
    for _attribute, _text in _attributes.items():
        object_definitions[_object_type][_attribute]["help_text"] = _text
for _object_type, _attributes in _UNDOCUMENTED.items():
    for _attribute in _attributes:
        object_definitions[_object_type][_attribute]["help_text"] = _BLANK_HELP

# NOTE(review): the original code also set a type-level (not per-attribute)
# help_text on serviceescalation; preserved verbatim in case templates read it.
object_definitions["serviceescalation"]["help_text"] = _(" ")
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
class Attribute(models.Model):
    """This class stores info on how attributes are viewed in django"""
    # Raw attribute name as it appears in the Nagios configuration.
    attribute_name = models.CharField(max_length=200)
    # Human-friendly display name shown in the web interface.
    attribute_friendlyname = models.CharField(max_length=200)
    # How the attribute should be rendered/edited (free-form string).
    attribute_type = models.CharField(max_length=200)
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import pynag.Model
import adagios.settings
# Point pynag at the Nagios config configured for adagios, so the tests
# below parse the same configuration the application itself uses.
pynag.Model.cfg_file = adagios.settings.nagios_config
class TestObjectBrowser(unittest.TestCase):
def testNagiosConfigFile(self):
result = pynag.Model.ObjectDefinition.objects.all
config = pynag.Model.config.cfg_file
self.assertGreaterEqual(
len(result), 0, msg=_("Parsed nagios.cfg, but found no objects, are you sure this is the right config file (%(config)s) ? ") % {'config': config})
def testIndexPage(self):
c = Client()
response = c.get('/objectbrowser/')
self.assertEqual(response.status_code, 200)
def testPageLoad(self):
""" Smoke test a bunch of views """
# TODO: Better tests, at least squeeze out a 200OK for these views
self.loadPage('/objectbrowser/')
self.loadPage('/objectbrowser/copy', 404)
self.loadPage('/objectbrowser/search')
self.loadPage('/objectbrowser/delete', 404)
self.loadPage('/objectbrowser/bulk_edit')
self.loadPage('/objectbrowser/bulk_delete')
self.loadPage('/objectbrowser/bulk_copy')
self.loadPage('/objectbrowser/edit_all', 404)
self.loadPage('/objectbrowser/copy_and_edit', 301)
self.loadPage('/objectbrowser/confighealth')
self.loadPage('/objectbrowser/plugins')
self.loadPage('/objectbrowser/nagios.cfg')
self.loadPage('/objectbrowser/geek_edit', 404)
self.loadPage('/objectbrowser/advanced_edit', 404)
#self.loadPage('/objectbrowser/add_to_group')
self.loadPage('/objectbrowser/add/host', 200)
self.loadPage('/objectbrowser/add/hostgroup', 200)
self.loadPage('/objectbrowser/add/service', 200)
self.loadPage('/objectbrowser/add/servicegroup', 200)
self.loadPage('/objectbrowser/add/contact', 200)
self.loadPage('/objectbrowser/add/contactgroup', 200)
self.loadPage('/objectbrowser/add/timeperiod', 200)
self.loadPage('/objectbrowser/add/command', 200)
self.loadPage('/objectbrowser/add/template', 200)
def loadPage(self, url, expected_code=200):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, expected_code, _("Expected status code 200 for page %(url)s") % {'url': url})
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(error)s") % {'url': url, 'error': e})
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
# URL routes for the objectbrowser app. Order matters: Django matches the
# first pattern that fits, so the legacy/deprecated routes are kept at the
# bottom of the list.
urlpatterns = patterns('adagios',
                       url(r'^/$', 'objectbrowser.views.list_object_types', name="objectbrowser"),
                       url(r'^/edit_all/(?P<object_type>.+)/(?P<attribute_name>.+)/?$', 'objectbrowser.views.edit_all'),
                       url(r'^/search/?$', 'objectbrowser.views.search_objects', name="search"),
                       url(r'^/edit/(?P<object_id>.+?)?$', 'objectbrowser.views.edit_object', name="edit_object"),
                       url(r'^/edit/?$', 'objectbrowser.views.edit_object'),
                       url(r'^/copy_and_edit/(?P<object_id>.+?)?$', 'objectbrowser.views.copy_and_edit_object'),
                       url(r'^/copy/(?P<object_id>.+)$', 'objectbrowser.views.copy_object', name="copy_object"),
                       url(r'^/delete/(?P<object_id>.+)$', 'objectbrowser.views.delete_object', name="delete_object"),
                       url(r'^/delete/(?P<object_type>.+?)/(?P<shortname>.+)/?$', 'objectbrowser.views.delete_object_by_shortname', name="delete_by_shortname"),
                       url(r'^/add/(?P<object_type>.+)$', 'objectbrowser.views.add_object', name="addobject"),
                       # Bulk operations work on several objects selected via querystring/POST
                       url(r'^/bulk_edit/?$', 'objectbrowser.views.bulk_edit', name='bulk_edit'),
                       url(r'^/bulk_delete/?$', 'objectbrowser.views.bulk_delete', name='bulk_delete'),
                       url(r'^/bulk_copy/?$', 'objectbrowser.views.bulk_copy', name='bulk_copy'),
                       url(r'^/add_to_group/(?P<group_type>.+)/(?P<group_name>.+)/?$', 'objectbrowser.views.add_to_group'),
                       url(r'^/add_to_group/(?P<group_type>.+)/?$', 'objectbrowser.views.add_to_group'),
                       url(r'^/add_to_group', 'objectbrowser.views.add_to_group'),
                       url(r'^/confighealth/?$', 'objectbrowser.views.config_health'),
                       url(r'^/plugins/?$', 'objectbrowser.views.show_plugins'),
                       url(r'^/nagios.cfg/?$', 'objectbrowser.views.edit_nagios_cfg'),
                       url(r'^/nagios.cfg/edit/?$', 'misc.views.edit_nagios_cfg'),
                       url(r'^/geek_edit/id=(?P<object_id>.+)$', 'objectbrowser.views.geek_edit'),
                       url(r'^/advanced_edit/id=(?P<object_id>.+)$', 'objectbrowser.views.advanced_edit'),

                       # Here for backwards compatibility.
                       url(r'^/edit/id=(?P<object_id>.+)$', 'objectbrowser.views.edit_object', ),
                       url(r'^/id=(?P<object_id>.+)$', 'objectbrowser.views.edit_object', ),

                       # These should be deprecated as of 2012-08-27
                       url(r'^/copy_object/id=(?P<object_id>.+)$', 'objectbrowser.views.copy_object'),
                       url(r'^/delete_object/id=(?P<object_id>.+)$', 'objectbrowser.views.delete_object'),
                       )
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render_to_response, redirect, HttpResponse, Http404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
import os
from os.path import dirname
from pynag.Model import ObjectDefinition, string_to_class
from pynag import Model
from pynag.Parsers import status
import pynag.Utils
from collections import defaultdict, namedtuple
import pynag.Model
from adagios import settings
from adagios.objectbrowser.forms import *
from adagios.views import adagios_decorator
@adagios_decorator
def home(request):
    """ Objectbrowser front page: forward the visitor to the adagios start page. """
    destination = 'adagios'
    return redirect(destination)
@adagios_decorator
def list_object_types(request):
    """ Collects statistics about pynag objects and returns to template """
    context = {}
    return render_to_response('list_object_types.html', context,
                              context_instance=RequestContext(request))
@adagios_decorator
def geek_edit(request, object_id):
""" Function handles POST requests for the geek edit form """
c = {}
c.update(csrf(request))
c['messages'] = m = []
c['errors'] = []
# Get our object
try:
o = ObjectDefinition.objects.get_by_id(id=object_id)
except Exception, e:
# This is an ugly hack. If unknown object ID was specified and it so happens to
# Be the same as a brand new empty object definition we will assume that we are
# to create a new object definition instead of throwing error because ours was
# not found.
for i in Model.string_to_class.values():
if i().get_id() == object_id:
o = i()
break
else:
c['error_summary'] = _('Unable to find object')
c['error'] = e
return render_to_response('error.html', c, context_instance=RequestContext(request))
c['my_object'] = o
if request.method == 'POST':
# Manual edit of the form
form = GeekEditObjectForm(pynag_object=o, data=request.POST)
if form.is_valid():
try:
form.save()
m.append("Object Saved manually to '%s'" % o['filename'])
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
else:
c['errors'].append(_("Problem with saving object"))
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
else:
form = GeekEditObjectForm(
initial={'definition': o['meta']['raw_definition'], })
c['geek_edit'] = form
# Lets return the user to the general edit_object form
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': o.get_id()}))
@adagios_decorator
def advanced_edit(request, object_id):
    """ Handles POST only requests for the "advanced" object edit form.

    Saves all attributes of one object at once and redirects back to the
    regular edit_object view on success.
    """
    c = {}
    c.update(csrf(request))
    c['messages'] = m = []
    c['errors'] = []
    # Get our object
    try:
        o = ObjectDefinition.objects.get_by_id(id=object_id)
        c['my_object'] = o
    except Exception, e:
        # This is an ugly hack. If unknown object ID was specified and it so happens to
        # Be the same as a brand new empty object definition we will assume that we are
        # to create a new object definition instead of throwing error because ours was
        # not found.
        # NOTE(review): on this fallback path c['my_object'] is never set --
        # confirm whether the error template depends on it.
        for i in Model.string_to_class.values():
            if i().get_id() == object_id:
                o = i()
                break
        else:
            c['error_summary'] = _('Unable to get object')
            c['error'] = e
            return render_to_response('error.html', c, context_instance=RequestContext(request))
    if request.method == 'POST':
        # User is posting data into our form
        c['advanced_form'] = AdvancedEditForm(
            pynag_object=o, initial=o._original_attributes, data=request.POST)
        if c['advanced_form'].is_valid():
            try:
                c['advanced_form'].save()
                m.append(_("Object Saved to %(filename)s") % o)
            except Exception, e:
                c['errors'].append(e)
                return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
        else:
            c['errors'].append(_("Problem reading form input"))
            return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
    # On success (and for non-POST requests) return to the general edit form.
    return HttpResponseRedirect(reverse('edit_object', args=[o.get_id()]))
@adagios_decorator
def edit_object(request, object_id=None):
""" Brings up an edit dialog for one specific object.
If an object_id is specified, bring us to that exact object.
Otherwise we expect some search arguments to have been provided via querystring
"""
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
my_object = None # This is where we store our item that we are editing
# If object_id was not provided, lets see if anything was given to us in a querystring
if not object_id:
objects = pynag.Model.ObjectDefinition.objects.filter(**request.GET)
if len(objects) == 1:
my_object = objects[0]
else:
return search_objects(request)
else:
try:
my_object = pynag.Model.ObjectDefinition.objects.get_by_id(object_id)
except KeyError:
c['error_summary'] = _('Could not find any object with id="%(object_id)s" :/') % {'object_id': object_id}
c['error_type'] = _("object not found")
return render_to_response('error.html', c, context_instance=RequestContext(request))
if request.method == 'POST':
# User is posting data into our form
c['form'] = PynagForm(
pynag_object=my_object,
initial=my_object._original_attributes,
data=request.POST
)
if c['form'].is_valid():
try:
c['form'].save()
c['messages'].append(_("Object Saved to %(filename)s") % my_object)
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': my_object.get_id()}))
except Exception, e:
c['errors'].append(e)
else:
c['errors'].append(_("Could not validate form input"))
if 'form' not in c:
c['form'] = PynagForm(pynag_object=my_object, initial=my_object._original_attributes)
c['my_object'] = my_object
c['geek_edit'] = GeekEditObjectForm(
initial={'definition': my_object['meta']['raw_definition'], })
c['advanced_form'] = AdvancedEditForm(
pynag_object=my_object, initial=my_object._original_attributes)
try:
c['effective_hosts'] = my_object.get_effective_hosts()
except KeyError, e:
c['errors'].append(_("Could not find host: %(error)s") % {'error': str(e)})
except AttributeError:
pass
try:
c['effective_parents'] = my_object.get_effective_parents(cache_only=True)
except KeyError, e:
c['errors'].append(_("Could not find parent: %(error)s") % {'error': str(e)})
# Every object type has some special treatment, so lets resort
# to appropriate helper function
if False:
pass
elif my_object['object_type'] == 'servicegroup':
return _edit_servicegroup(request, c)
elif my_object['object_type'] == 'hostdependency':
return _edit_hostdependency(request, c)
elif my_object['object_type'] == 'service':
return _edit_service(request, c)
elif my_object['object_type'] == 'contactgroup':
return _edit_contactgroup(request, c)
elif my_object['object_type'] == 'hostgroup':
return _edit_hostgroup(request, c)
elif my_object['object_type'] == 'host':
return _edit_host(request, c)
elif my_object['object_type'] == 'contact':
return _edit_contact(request, c)
elif my_object['object_type'] == 'command':
return _edit_command(request, c)
elif my_object['object_type'] == 'servicedependency':
return _edit_servicedependency(request, c)
elif my_object['object_type'] == 'timeperiod':
return _edit_timeperiod(request, c)
else:
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_contact(request, c):
""" This is a helper function to edit_object """
try:
c['effective_contactgroups'] = c[
'my_object'].get_effective_contactgroups()
except KeyError, e:
c['errors'].append(_("Could not find contact: %(error)s") % {'error': str(e)})
return render_to_response('edit_contact.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_service(request, c):
    """ This is a helper function to edit_object.

    Enriches the context with service-specific data (resolved command line,
    macros, live status, group memberships) and renders edit_service.html.
    """
    service = c['my_object']
    # Command line / macros may fail to resolve if referenced objects are
    # missing; display nothing in that case instead of erroring out.
    try:
        c['command_line'] = service.get_effective_command_line()
    except KeyError:
        c['command_line'] = None
    try:
        c['object_macros'] = service.get_all_macros()
    except KeyError:
        c['object_macros'] = None
    # Get the current status from Nagios
    try:
        s = status()
        s.parse()
        c['status'] = s.get_servicestatus(
            service['host_name'], service['service_description'])
        # Map the nagios plugin state (stringified number) to a label text
        # and a bootstrap css class for the template.
        current_state = c['status']['current_state']
        if current_state == "0":
            c['status']['text'] = 'OK'
            c['status']['css_label'] = 'label-success'
        elif current_state == "1":
            c['status']['text'] = 'Warning'
            c['status']['css_label'] = 'label-warning'
        elif current_state == "2":
            c['status']['text'] = 'Critical'
            c['status']['css_label'] = 'label-important'
        else:
            c['status']['text'] = 'Unknown'
            c['status']['css_label'] = 'label-inverse'
    except Exception:
        # Live status is optional; the edit page works without it.
        pass
    try:
        c['effective_servicegroups'] = service.get_effective_servicegroups()
    except KeyError, e:
        c['errors'].append(_("Could not find servicegroup: %(error)s") % {'error': str(e)})
    try:
        c['effective_contacts'] = service.get_effective_contacts()
    except KeyError, e:
        c['errors'].append(_("Could not find contact: %(error)s") % {'error': str(e)})
    try:
        c['effective_contactgroups'] = service.get_effective_contact_groups()
    except KeyError, e:
        c['errors'].append(_("Could not find contact_group: %(error)s") % {'error': str(e)})
    try:
        c['effective_hostgroups'] = service.get_effective_hostgroups()
    except KeyError, e:
        c['errors'].append(_("Could not find hostgroup: %(error)s") % {'error': str(e)})
    try:
        c['effective_command'] = service.get_effective_check_command()
    except KeyError, e:
        # Missing check_command is only an error for registered services.
        if service.check_command is not None:
            c['errors'].append(_("Could not find check_command: %(error)s") % {'error': str(e)})
        elif service.register != '0':
            c['errors'].append(_("You need to define a check command"))
    # For the check_command editor, we inject current check_command and a list
    # of all check_commands
    c['check_command'] = (service.check_command or '').split("!")[0]
    c['command_names'] = map(
        lambda x: x.get("command_name", ''), Model.Command.objects.all)
    if c['check_command'] in (None, '', 'None'):
        c['check_command'] = ''
    # Warn when this one service definition actually applies to many hosts.
    if service.hostgroup_name and service.hostgroup_name != 'null':
        c['errors'].append(_("This Service is applied to every host in hostgroup %(hostgroup_name)s") % {'hostgroup_name': service.hostgroup_name})
    host_name = service.host_name or ''
    if ',' in host_name:
        c['errors'].append(_("This Service is applied to multiple hosts"))
    return render_to_response('edit_service.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_contactgroup(request, c):
""" This is a helper function to edit_object """
try:
c['effective_contactgroups'] = c[
'my_object'].get_effective_contactgroups()
except KeyError, e:
c['errors'].append(_("Could not find contact_group: %(error)s") % {'error': str(e)})
try:
c['effective_contacts'] = c['my_object'].get_effective_contacts()
except KeyError, e:
c['errors'].append("Could not find contact: %s" % str(e))
try:
c['effective_memberof'] = Model.Contactgroup.objects.filter(
contactgroup_members__has_field=c['my_object'].contactgroup_name)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_contactgroup.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_hostgroup(request, c):
""" This is a helper function to edit_object """
hostgroup = c['my_object']
try:
c['effective_services'] = sorted(
hostgroup.get_effective_services(), key=lambda x: x.get_description())
except KeyError, e:
c['errors'].append(_("Could not find service: %(error)s") % {'error': str(e)})
try:
c['effective_memberof'] = Model.Hostgroup.objects.filter(
hostgroup_members__has_field=c['my_object'].hostgroup_name)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_hostgroup.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_servicegroup(request, c):
""" This is a helper function to edit_object """
try:
c['effective_memberof'] = Model.Servicegroup.objects.filter(
servicegroup_members__has_field=c['my_object'].servicegroup_name)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_servicegroup.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_command(request, c):
    """ Helper for edit_object: renders the command-specific edit page. """
    ctx = RequestContext(request)
    return render_to_response('edit_command.html', c, context_instance=ctx)
@pynag.Utils.cache_only
def _edit_hostdependency(request, c):
    """ Helper for edit_object: renders the hostdependency-specific edit page. """
    # NOTE(review): the template name 'edit_hostdepedency.html' looks
    # misspelled -- confirm it matches the actual template file before renaming.
    ctx = RequestContext(request)
    return render_to_response('edit_hostdepedency.html', c, context_instance=ctx)
@pynag.Utils.cache_only
def _edit_servicedependency(request, c):
    """ Helper for edit_object: renders the servicedependency-specific edit page. """
    ctx = RequestContext(request)
    return render_to_response('_edit_servicedependency.html', c, context_instance=ctx)
@pynag.Utils.cache_only
def _edit_timeperiod(request, c):
    """ Helper for edit_object: renders the timeperiod-specific edit page. """
    ctx = RequestContext(request)
    return render_to_response('edit_timeperiod.html', c, context_instance=ctx)
@pynag.Utils.cache_only
def _edit_host(request, c):
    """ This is a helper function to edit_object.

    Enriches the context with host-specific data (resolved command line,
    macros, live status, services, group memberships) and renders
    edit_host.html.
    """
    host = c['my_object']
    # Command line / macros may fail to resolve if referenced objects are
    # missing; display nothing in that case instead of erroring out.
    try:
        c['command_line'] = host.get_effective_command_line()
    except KeyError:
        c['command_line'] = None
    try:
        c['object_macros'] = host.get_all_macros()
    except KeyError:
        c['object_macros'] = None
    if not 'errors' in c:
        c['errors'] = []
    try:
        c['effective_services'] = sorted(
            host.get_effective_services(), key=lambda x: x.get_description())
    except KeyError, e:
        c['errors'].append(_("Could not find service: %(error)s") % {'error': str(e)})
    try:
        c['effective_hostgroups'] = host.get_effective_hostgroups()
    except KeyError, e:
        c['errors'].append(_("Could not find hostgroup: %(error)s") % {'error': str(e)})
    try:
        c['effective_contacts'] = host.get_effective_contacts()
    except KeyError, e:
        c['errors'].append(_("Could not find contact: %(error)s") % {'error': str(e)})
    try:
        c['effective_contactgroups'] = host.get_effective_contact_groups()
    except KeyError, e:
        c['errors'].append(_("Could not find contact_group: %(error)s") % {'error': str(e)})
    try:
        c['effective_command'] = host.get_effective_check_command()
    except KeyError, e:
        # Missing check_command is only an error for registered hosts.
        if host.check_command is not None:
            c['errors'].append(_("Could not find check_command: %(error)s") % {'error': str(e)})
        elif host.register != '0':
            c['errors'].append(_("You need to define a check command"))
    # Get the current status from Nagios and map it to a label + css class.
    try:
        s = status()
        s.parse()
        c['status'] = s.get_hoststatus(host['host_name'])
        current_state = c['status']['current_state']
        if int(current_state) == 0:
            c['status']['text'] = 'UP'
            c['status']['css_label'] = 'label-success'
        else:
            c['status']['text'] = 'DOWN'
            c['status']['css_label'] = 'label-important'
    except Exception:
        # Live status is optional; the edit page works without it.
        pass
    return render_to_response('edit_host.html', c, context_instance=RequestContext(request))
@adagios_decorator
def config_health(request):
    """ Display possible errors in your nagios config
    """
    c = dict()
    c['messages'] = []
    c['object_health'] = s = {}
    c['booleans'] = {}
    # Registered services that lack a service_description
    services_no_description = Model.Service.objects.filter(
        register="1", service_description=None)
    # NOTE(review): the lists below are shown in the template but are never
    # populated anywhere in this view -- they always render empty. Presumably
    # these health checks were planned but not implemented; confirm.
    hosts_without_contacts = []
    hosts_without_services = []
    objects_with_invalid_parents = []
    services_without_contacts = []
    services_using_hostgroups = []
    services_without_icon_image = []
    c['booleans'][
        _('Nagios Service has been reloaded since last configuration change')] = not Model.config.needs_reload()
    c['booleans'][
        _('Adagios configuration cache is up-to-date')] = not Model.config.needs_reparse()
    # Attach a pynag model instance to each parser error so the template can
    # link back to the offending object.
    for i in Model.config.errors:
        if i.item:
            Class = Model.string_to_class[i.item['meta']['object_type']]
            i.model = Class(item=i.item)
    c['parser_errors'] = Model.config.errors
    try:
        import okconfig
        c['booleans'][
            _('OKConfig is installed and working')] = okconfig.is_valid()
    except Exception:
        # okconfig is an optional dependency
        c['booleans'][_('OKConfig is installed and working')] = False
    s['Parser errors'] = Model.config.errors
    s['Services with no "service_description"'] = services_no_description
    s['Hosts without any contacts'] = hosts_without_contacts
    s['Services without any contacts'] = services_without_contacts
    s['Objects with invalid "use" attribute'] = objects_with_invalid_parents
    s['Services applied to hostgroups'] = services_using_hostgroups
    s['Services without a logo'] = services_without_icon_image
    s['Hosts without Service Checks'] = hosts_without_services
    # ?show=<category> drills down into the objects of one health category
    # (dict.has_key is Python 2 only)
    if request.GET.has_key('show') and s.has_key(request.GET['show']):
        objects = s[request.GET['show']]
        return search_objects(request, objects=objects)
    else:
        return render_to_response('suggestions.html', c, context_instance=RequestContext(request))
@adagios_decorator
def show_plugins(request):
    """ Finds all command_line arguments, and shows missing plugins.

    Walks every service's check_command, resolves the effective command line
    and checks whether the plugin binary exists on disk. Renders
    show_plugins.html with:
        missing_plugins  -- (check_command, plugin_path) pairs not found on disk
        existing_plugins -- (check_command, plugin_path) pairs found on disk
    """
    c = {}
    missing_plugins = []
    existing_plugins = []
    # Use a set for the already-processed commands; the previous list made
    # the duplicate check O(n) per service.
    finished = set()
    services = Model.Service.objects.all
    common_interpreters = ['perl', 'python', 'sh', 'bash']
    for s in services:
        if 'check_command' not in s._defined_attributes:
            continue
        check_command = s.check_command.split('!')[0]
        if check_command in finished:
            continue
        finished.add(check_command)
        try:
            command_line = s.get_effective_command_line()
        except KeyError:
            continue
        if command_line is None:
            continue
        command_line = command_line.split()
        command_name = command_line.pop(0)
        # If the check is invoked through an interpreter, the plugin path is
        # the next token on the command line.
        if command_name in common_interpreters:
            command_name = command_line.pop(0)
        if os.path.exists(command_name):
            existing_plugins.append((check_command, command_name))
        else:
            missing_plugins.append((check_command, command_name))
    c['missing_plugins'] = missing_plugins
    c['existing_plugins'] = existing_plugins
    return render_to_response('show_plugins.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit_nagios_cfg(request):
    """ This views is made to make modifications to nagios.cfg

    Builds a documented list of every main-config option (with current
    values) and renders edit_configfile.html.
    """
    from pynag.Model.all_attributes import main_config
    c = {'filename': Model.config.cfg_file}
    c['content'] = []

    # Parse the main config once up front; previously parse_maincfg() was
    # re-run on every iteration of the loop below.
    Model.config.parse_maincfg()

    # Documented options first, with any values currently set in nagios.cfg
    for conf in sorted(main_config):
        values = []
        for k, v in Model.config.maincfg_values:
            if conf == k:
                values.append(v)
        c['content'].append({
            'doc': main_config[conf]['doc'],
            'title': main_config[conf]['title'],
            'examples': main_config[conf]['examples'],
            'format': main_config[conf]['format'],
            'options': main_config[conf]['options'],
            'key': conf,
            'values': values
        })

    # Options present in nagios.cfg that we have no documentation for
    for key, v in Model.config.maincfg_values:
        if key not in main_config:
            c['content'].append({
                'title': _('No documentation found'),
                'key': key,
                'values': [v],
                # Message fix: the previous implicit string concatenation
                # produced "...Perhaps it ismispelled." (missing space, typo).
                'doc': _('This seems to be an undefined option and no documentation was found for it. '
                         'Perhaps it is misspelled.')
            })
    c['content'] = sorted(c['content'], key=lambda cfgitem: cfgitem['key'])
    return render_to_response('edit_configfile.html', c, context_instance=RequestContext(request))
@adagios_decorator
def bulk_edit(request):
    """ Edit multiple objects with one post """
    c = {}
    c.update(csrf(request))
    c['messages'] = []
    c['errors'] = []
    c['objects'] = objects = []

    # Newer, alternative way to input items from the post data is in the form of
    # object_type=shortname
    # i.e. timeperiod=24x7, timeperiod=workhours
    for i in _querydict_to_objects(request):
        objects.append(i)

    if request.method == 'GET':
        # Only one object selected: nothing to "bulk" edit, go straight to
        # the regular edit form for that object.
        if len(objects) == 1:
            return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': objects[0].get_id()}), )
        c['form'] = BulkEditForm(objects=objects)
    if request.method == "POST":
        c['form'] = BulkEditForm(objects=objects, data=request.POST)
        c['objects'] = c['form'].all_objects
        if c['form'].is_valid():
            try:
                c['form'].save()
                for i in c['form'].changed_objects:
                    c['messages'].append(
                        _("saved changes to %(object_type)s '%(description)s'") % {'object_type': i.object_type,
                                                                                   'description': i.get_description(),
                                                                                   })
                c['success'] = "success"
            except IOError, e:
                c['errors'].append(e)
    return render_to_response('bulk_edit.html', c, context_instance=RequestContext(request))
@adagios_decorator
def bulk_delete(request):
""" Edit delete multiple objects with one post """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['objects'] = objects = []
c['form'] = BulkDeleteForm(objects=objects)
# Newer, alternative way to input items from the post data is in the form of
# object_type=shortname
# i.e. timeperiod=24x7, timeperiod=workhours
for i in _querystring_to_objects(request.GET or request.POST):
try:
obj = pynag.Model.string_to_class[i.object_type].objects.get_by_shortname(i.description)
if obj not in objects:
objects.append(obj)
except KeyError:
c['errors'].append(_("Could not find %(object_type)s '%(description)s' "
"Maybe it has already been deleted.") % {'object_type': i.object_type,
'description': i.description})
if request.method == "GET" and len(objects) == 1:
return HttpResponseRedirect(reverse('delete_object', kwargs={'object_id': objects[0].get_id()}), )
if request.method == "POST":
# Post items starting with "hidden_" will be displayed on the resulting web page
# Post items starting with "change_" will be modified
for i in request.POST.keys():
if i.startswith('change_'):
my_id = i[len('change_'):]
my_obj = ObjectDefinition.objects.get_by_id(my_id)
if my_obj not in objects:
objects.append(my_obj)
c['form'] = BulkDeleteForm(objects=objects, data=request.POST)
if c['form'].is_valid():
try:
c['form'].delete()
c['success'] = "Success"
for i in c['form'].changed_objects:
c['messages'].append(
"Deleted %s %s" % (i.object_type, i.get_description()))
except IOError, e:
c['errors'].append(e)
return render_to_response('bulk_delete.html', c, context_instance=RequestContext(request))
@adagios_decorator
def bulk_copy(request):
    """ Copy multiple objects with one post """
    c = {}
    c.update(csrf(request))
    c['messages'] = []
    c['errors'] = []
    c['objects'] = objects = []
    c['form'] = BulkCopyForm(objects=objects)

    # Newer, alternative way to input items from the post data is in the form of
    # object_type=shortname
    # i.e. timeperiod=24x7, timeperiod=workhours
    for i in _querystring_to_objects(request.GET or request.POST):
        try:
            obj = pynag.Model.string_to_class[i.object_type].objects.get_by_shortname(i.description)
            if obj not in objects:
                objects.append(obj)
        except KeyError:
            c['errors'].append(_("Could not find %(object_type)s '%(description)s'") % {'object_type': i.object_type,
                                                                                        'description': i.description,
                                                                                        })
    if request.method == "GET" and len(objects) == 1:
        # Only one object: use the simpler single-object copy view instead.
        return HttpResponseRedirect(reverse('copy_object', kwargs={'object_id': objects[0].get_id()}), )
    elif request.method == "POST":
        # Post items starting with "hidden_" will be displayed on the resulting web page
        # Post items starting with "change_" will be modified
        for i in request.POST.keys():
            if i.startswith('change_'):
                my_id = i[len('change_'):]
                my_obj = ObjectDefinition.objects.get_by_id(my_id)
                if my_obj not in objects:
                    objects.append(my_obj)
        c['form'] = BulkCopyForm(objects=objects, data=request.POST)
        if c['form'].is_valid():
            try:
                c['form'].save()
                c['success'] = "Success"
                for i in c['form'].changed_objects:
                    c['messages'].append(
                        _("Successfully copied %(object_type)s %(description)s") % {'object_type': i.object_type,
                                                                                    'description': i.get_description()})
            except IOError, e:
                c['errors'].append(e)
    return render_to_response('bulk_copy.html', c, context_instance=RequestContext(request))
@adagios_decorator
def delete_object_by_shortname(request, object_type, shortname):
    """ Same as delete_object() but uses object type and shortname instead of object_id
    """
    model_class = Model.string_to_class[object_type]
    target = model_class.objects.get_by_shortname(shortname)
    return delete_object(request, object_id=target.get_id())
@adagios_decorator
def delete_object(request, object_id):
""" View to Delete a single object definition """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['object'] = my_obj = Model.ObjectDefinition.objects.get_by_id(object_id)
c['form'] = DeleteObjectForm(pynag_object=my_obj, initial=request.GET)
if request.method == 'POST':
try:
c['form'] = f = DeleteObjectForm(
pynag_object=my_obj, data=request.POST)
if f.is_valid():
f.delete()
return HttpResponseRedirect(reverse('objectbrowser') + "#" + my_obj.object_type)
except Exception, e:
c['errors'].append(e)
return render_to_response('delete_object.html', c, context_instance=RequestContext(request))
@adagios_decorator
def copy_object(request, object_id):
    """ View to Copy a single object definition """
    c = {}
    c.update(csrf(request))
    c['messages'] = []
    c['errors'] = []
    c['object'] = my_obj = Model.ObjectDefinition.objects.get_by_id(object_id)

    if request.method == 'GET':
        c['form'] = CopyObjectForm(pynag_object=my_obj, initial=request.GET)
    elif request.method == 'POST':
        c['form'] = f = CopyObjectForm(pynag_object=my_obj, data=request.POST)
        if f.is_valid():
            try:
                f.save()
                c['copied_objects'] = f.copied_objects
                c['success'] = 'success'
            # NOTE(review): catching IndexError here looks suspicious --
            # sibling views catch IOError/Exception on form.save(). Confirm
            # which exceptions CopyObjectForm.save() can actually raise.
            except IndexError, e:
                c['errors'].append(e)
    return render_to_response('copy_object.html', c, context_instance=RequestContext(request))
@adagios_decorator
def add_object(request, object_type):
    """ Friendly wizard on adding a new object of any particular type
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    c['object_type'] = object_type

    # Templates get their own dedicated form; every other object type uses
    # the generic AddObjectForm parameterized with object_type.
    if request.method == 'GET' and object_type == 'template':
        c['form'] = AddTemplateForm(initial=request.GET)
    elif request.method == 'GET':
        c['form'] = AddObjectForm(object_type, initial=request.GET)
    elif request.method == 'POST' and object_type == 'template':
        c['form'] = AddTemplateForm(data=request.POST)
    elif request.method == 'POST':
        c['form'] = AddObjectForm(object_type, data=request.POST)
    else:
        c['errors'].append(_("Something went wrong while calling this form"))

    # This is what happens in post regardless of which type of form it is
    if request.method == 'POST' and 'form' in c:
        # If form is valid, save object and take user to edit_object form.
        if c['form'].is_valid():
            c['form'].save()
            object_id = c['form'].pynag_object.get_id()
            return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': object_id}), )
        else:
            c['errors'].append(_('Could not validate form input'))
    return render_to_response('add_object.html', c, context_instance=RequestContext(request))
def _querystring_to_objects(dictionary):
    """ Find all nagios object specifications in a querystring.

    Arguments:
        dictionary -- A QueryDict (e.g. request.GET); every key that matches
                      a known nagios object type is read with getlist().

    Returns:
        List of namedtuples (object_type, description), one per value found,
        e.g. [Object(object_type='host', description='localhost1'), ...]
    """
    # Bugfix: the previous docstring/doctest printed the function object and
    # used a plain dict, which has no getlist(); documented accurately now.
    Object = namedtuple('Object', 'object_type description')
    result = []
    for object_type in string_to_class.keys():
        for description in dictionary.getlist(object_type):
            result.append(Object(object_type, description))
    return result
def _querydict_to_objects(request, raise_on_not_found=False):
    """ Finds all object specifications in a querydict and returns a list of pynag objects
    Typically this is used to name specific objects from the querystring.
    Valid input in the request is either id=object_id or object_type=short_name
    Arguments:
      request            - A django request object. Usually the data is in a querystring or POST data
                         - Example: host=localhost,service=localhost/Ping
      raise_on_not_found - Raise ValueError if some object is not found
    Returns:
      List of pynag objects
    """
    result = []
    # GET takes precedence; POST is only consulted when GET is empty.
    mydict = request.GET or request.POST
    # Find everything in the querystring in the form of id=[object_ids]
    for object_id in mydict.getlist('id'):
        try:
            my_object = ObjectDefinition.objects.get_by_id(object_id)
            result.append(my_object)
        except Exception, e:
            # Lookup failures are silently skipped unless the caller opted in.
            if raise_on_not_found is True:
                raise e
    # Find everything in querystring in the form of object_type=[shortnames]
    for object_type,Class in string_to_class.items():
        objects = mydict.getlist(object_type)
        for shortname in objects:
            try:
                my_object = Class.objects.get_by_shortname(shortname)
                result.append(my_object)
            except Exception, e:
                # If a service was not found, check if it was registered in
                # some unusual way (applied via hostgroup, "host/description")
                if object_type == 'service' and '/' in shortname:
                    host_name,service_description = shortname.split('/', 1)
                    result.append(_find_service(host_name, service_description))
                # NOTE(review): when the fallback above succeeds, the original
                # exception is still re-raised here if raise_on_not_found is
                # set — presumably a "continue" was intended; confirm intent.
                if raise_on_not_found is True:
                    raise e
    return result
def _find_service(host_name, service_description):
    """ Returns pynag.Model.Service matching our search filter """
    matches = pynag.Model.Service.objects.filter(
        host_name__has_field=host_name, service_description=service_description)
    if not matches:
        # Direct filter found nothing; walk the host's effective services
        # (covers services applied via hostgroups etc.)
        host = pynag.Model.Host.objects.get_by_shortname(host_name, cache_only=True)
        for candidate in host.get_effective_services():
            if candidate.service_description == service_description:
                matches = [candidate]
                break
    return matches[0]
@adagios_decorator
def add_to_group(request, group_type=None, group_name=''):
""" Add one or more objects into a group
"""
c = {}
messages = []
errors = []
if not group_type:
raise Exception(_("Please include group type"))
if request.method == 'GET':
objects = _querystring_to_objects(request.GET)
elif request.method == 'POST':
objects = _querystring_to_objects(request.GET)
for i in objects:
try:
obj = pynag.Model.string_to_class[i.object_type].objects.get_by_shortname(i.description)
if group_type == 'contactgroup':
obj.add_to_contactgroup(group_name)
elif group_type == 'hostgroup':
obj.add_to_hostgroup(group_name)
elif group_type == 'servicegroup':
obj.add_to_servicegroup(group_name)
return HttpResponse("Success")
except Exception, e:
errortype = e.__dict__.get('__name__') or str(type(e))
error = str(e)
return HttpResponse(_("Failed to add object: %(errortype)s %(error)s ") % {'errortype': errortype,
'error': error,
})
return render_to_response('add_to_group.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def edit_all(request, object_type, attribute_name):
    """ Edit many objects at once, changing only a single attribute
    Example:
    Edit notes_url of all services
    """
    # This view renders locals(), so these names are part of the template
    # contract — do not rename.
    messages = []
    errors = []
    objects = Model.string_to_class.get(object_type).objects.all
    # Each entry becomes (shortname, current attribute value).
    # NOTE(review): x.get_shortname is passed unbound (no parentheses) —
    # presumably relying on Django templates calling callables; confirm.
    objects = map(lambda x: (x.get_shortname, x.get(attribute_name)), objects)
    return render_to_response('edit_all.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def search_objects(request, objects=None):
    """ Displays a list of pynag objects, search parameters can be entered via querystring
    Arguments:
     objects -- List of pynag objects to show. If it is not set,
             -- We will use querystring instead as search arguments
    example:
     /adagios/objectbrowser/search?object_type=host&host_name__startswith=foo
    """
    # Rendered via locals(): messages/errors/objects/services are template
    # names — do not rename.
    messages = []
    errors = []
    if not objects:
        # Pass the querystring straight through as pynag filter kwargs
        objects = pynag.Model.ObjectDefinition.objects.filter(**request.GET)
    # A special case, if no object was found, lets check if user was looking for a service
    # With its host_name / service_description pair, and the service is applied to hostgroup instead
    if not objects and request.GET.get('object_type') == 'service':
        host_name = request.GET.get('host_name')
        service_description = request.GET.get('service_description')
        shortname = request.GET.get('shortname')
        # If shortname was provided instead of host_name / service_description
        if not host_name and not service_description and shortname:
            host_name, service_description = shortname.split('/')
        # If at this point we have found some objects, then lets do a special workaround
        services = [_find_service(host_name, service_description)]
        errors.append(_('be careful'))
    return render_to_response('search_objects.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def copy_and_edit_object(request, object_id):
    """ Create a new object, and open up an edit dialog for it.

    If object_id is provided, that object will be copied into this one.
    """
    # Querystring values become copy() overrides; empty-ish values mean None
    copy_attributes = {}
    for key, value in request.GET.items():
        copy_attributes[key] = None if value in ('', None, 'None') else value
    original = pynag.Model.ObjectDefinition.objects.get_by_id(object_id)
    clone = original.copy(**copy_attributes)
    # Re-fetch the freshly written copy so we have its persistent id
    clone = pynag.Model.ObjectDefinition.objects.filter(
        shortname=clone.get_shortname(), object_type=clone.object_type)[0]
    return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': clone.get_id()}))
########NEW FILE########
__FILENAME__ = forms
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
import okconfig
from adagios.misc import helpers
import re
from django.core.exceptions import ValidationError
import socket
from pynag import Model
from adagios.forms import AdagiosForm
from django.utils.translation import ugettext as _
def get_all_hosts():
    """ Choice list of every known host, prefixed with a "pick one" entry. """
    choices = [('', _('Select a host'))]
    for host_name in helpers.get_host_names():
        choices.append((host_name, host_name))
    return choices
def get_all_templates():
    """ Choice list of okconfig service templates (host templates excluded). """
    choices = []
    for template in okconfig.get_templates():
        if 'host' in template:
            continue
        choices.append(
            (template, _("Standard %(service_template)s checks") % {"service_template": template}))
    return choices
def get_all_groups():
    """ Choice list of every okconfig group, as (name, name) tuples. """
    return [(group, group) for group in okconfig.get_groups()]
def get_inactive_services():
    """ List of all unregistered services (templates) """
    templates = Model.Service.objects.filter(
        service_description__contains="", name__contains="", register="0")
    # First entry is the empty "pick one" choice; rest are (name, name) pairs
    choices = [('', _('Select a service'))]
    choices += [(template.name, template.name) for template in templates]
    choices.sort()
    return choices
class ScanNetworkForm(AdagiosForm):

    """ Validates a network address (optionally with /netmask) to scan. """
    network_address = forms.CharField()

    def clean_network_address(self):
        """ Accept either a bare IPv4 address or address/netmask notation. """
        addr = self.cleaned_data['network_address']
        if addr.find('/') > -1:
            addr, mask = addr.split('/', 1)
            if not mask.isdigit():
                raise ValidationError(_("not a valid netmask"))
            if not self.isValidIPAddress(addr):
                raise ValidationError(_("not a valid ip address"))
        else:
            if not self.isValidIPAddress(addr):
                raise ValidationError(_("not a valid ip address"))
        return self.cleaned_data['network_address']

    def isValidHostname(self, hostname):
        """ Return True if hostname is a syntactically valid DNS name. """
        if len(hostname) > 255:
            return False
        if hostname[-1:] == ".":
            # strip exactly one dot from the right, if present
            hostname = hostname[:-1]
        allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
        for x in hostname.split("."):
            # Bugfix: re.match() returns a match object or None, never False,
            # so the previous "allowed.match(x) is False" never rejected
            # any label. Test for no-match explicitly.
            if not allowed.match(x):
                return False
        return True

    def isValidIPAddress(self, ipaddress):
        """ Return True if ipaddress parses as an IPv4 address. """
        try:
            socket.inet_aton(ipaddress)
        except Exception:
            return False
        return True
class AddGroupForm(AdagiosForm):

    """ Form used to create a new okconfig group (host/contact group pair). """
    # Machine-friendly identifier of the group
    group_name = forms.CharField(help_text=_("Example: databases"))
    alias = forms.CharField(help_text=_("Human friendly name for the group"))
    force = forms.BooleanField(
        required=False, help_text=_("Overwrite group if it already exists."))
class AddHostForm(AdagiosForm):

    """ Form used to add a new host from an okconfig template. """
    host_name = forms.CharField(help_text=_("Name of the host to add"))
    address = forms.CharField(help_text=_("IP Address of this host"))
    group_name = forms.ChoiceField(
        initial="default", help_text=_("host/contact group to put this host in"))
    templates = forms.MultipleChoiceField(
        required=False, help_text=_("Add standard template of checks to this host"))
    force = forms.BooleanField(
        required=False, help_text=_("Overwrite host if it already exists."))

    def __init__(self, *args, **kwargs):
        # Bugfix: was super(self.__class__, self), which recurses infinitely
        # if this form is ever subclassed. Name the class explicitly.
        super(AddHostForm, self).__init__(*args, **kwargs)
        # Choices depend on the live configuration, so fill them per instance
        self.fields['group_name'].choices = get_all_groups()
        self.fields['templates'].choices = get_all_templates()

    def clean(self):
        """ Reject unknown templates and duplicate host names (unless forced). """
        cleaned_data = super(AddHostForm, self).clean()
        force = self.cleaned_data.get('force')
        host_name = self.cleaned_data.get('host_name')
        templates = self.cleaned_data.get('templates')
        for i in templates:
            if i not in okconfig.get_templates().keys():
                self._errors['templates'] = self.error_class(
                    [_('template %s was not found') % i])
        if not force and host_name in okconfig.get_hosts():
            self._errors['host_name'] = self.error_class(
                [_('Host name already exists. Use force to overwrite')])
        return cleaned_data
class AddTemplateForm(AdagiosForm):

    """ Form used to attach okconfig check templates to an existing host. """
    # Attributes
    host_name = forms.ChoiceField(help_text=_("Add templates to this host"))
    templates = forms.MultipleChoiceField(
        required=False, help_text=_("Add standard template of checks to this host"))
    force = forms.BooleanField(
        required=False, help_text=_("Overwrites templates if they already exist"))

    def __init__(self, *args, **kwargs):
        # Bugfix: was super(self.__class__, self), which recurses infinitely
        # if this form is ever subclassed. Name the class explicitly.
        super(AddTemplateForm, self).__init__(*args, **kwargs)
        self.fields['templates'].choices = get_all_templates()
        self.fields['host_name'].choices = get_all_hosts()

    def clean(self):
        """ Reject unknown templates and unknown hosts (unless forced). """
        cleaned_data = super(AddTemplateForm, self).clean()
        force = self.cleaned_data.get('force')
        host_name = self.cleaned_data.get('host_name')
        templates = self.cleaned_data.get('templates')
        for i in templates:
            if i not in okconfig.get_templates().keys():
                self._errors['templates'] = self.error_class(
                    [_('template %s was not found') % i])
        if not force and host_name not in okconfig.get_hosts():
            self._errors['host_name'] = self.error_class(
                [_('Host name not found Use force to write template anyway')])
        return cleaned_data

    def save(self):
        """ Apply every selected template to the host; record written files. """
        host_name = self.cleaned_data['host_name']
        templates = self.cleaned_data['templates']
        force = self.cleaned_data['force']
        self.filelist = []
        for i in templates:
            self.filelist += okconfig.addtemplate(
                host_name=host_name, template_name=i, force=force)
class InstallAgentForm(AdagiosForm):

    """ Form collecting credentials/method for installing okagent remotely. """
    remote_host = forms.CharField(help_text=_("Host or ip address"))
    # ssh is used for unix-like targets, winexe for windows targets
    install_method = forms.ChoiceField(
        initial='ssh', help_text=_("Make sure firewalls are not blocking ports 22(for ssh) or 445(for winexe)"),
        choices=[(_('auto detect'), _('auto detect')), ('ssh', 'ssh'), ('winexe', 'winexe')])
    username = forms.CharField(
        initial='root', help_text=_("Log into remote machine with as this user"))
    password = forms.CharField(
        required=False, widget=forms.PasswordInput, help_text=_("Leave empty if using kerberos or ssh keys"))
    windows_domain = forms.CharField(
        required=False, help_text=_("If remote machine is running a windows domain"))
class ChooseHostForm(AdagiosForm):

    """ Form that lets the user pick a single host to edit. """
    host_name = forms.ChoiceField(help_text=_("Select which host to edit"))

    def __init__(self, service=Model.Service(), *args, **kwargs):
        # Bugfix: was super(forms.Form, self), which skipped AdagiosForm in
        # the MRO. The unused 'service' parameter (with its import-time
        # default) is kept for interface compatibility.
        super(ChooseHostForm, self).__init__(*args, **kwargs)
        self.fields['host_name'].choices = get_all_hosts()
class AddServiceToHostForm(AdagiosForm):

    """ Form to attach one unregistered service template to a host. """
    host_name = forms.ChoiceField(
        help_text=_("Select host which you want to add service check to"))
    service = forms.ChoiceField(
        help_text=_("Select which service check you want to add to this host"))

    def __init__(self, service=Model.Service(), *args, **kwargs):
        # Bugfix: was super(forms.Form, self), which skipped AdagiosForm in
        # the MRO. The unused 'service' parameter (with its import-time
        # default) is kept for interface compatibility.
        super(AddServiceToHostForm, self).__init__(*args, **kwargs)
        self.fields['host_name'].choices = get_all_hosts()
        self.fields['service'].choices = get_inactive_services()
class EditTemplateForm(AdagiosForm):

    """ Edit the custom macros (and register flag) of one okconfig service.

    Field names are encoded as "host_name::service_description::attribute"
    so several of these forms can share one page without clashing.
    """

    def __init__(self, service=Model.Service(), *args, **kwargs):
        self.service = service
        # Bugfix: was super(forms.Form, self), which skipped AdagiosForm in
        # the MRO. Name the class explicitly.
        super(EditTemplateForm, self).__init__(*args, **kwargs)
        # Run through all the attributes; add to the form every service
        # macro plus the register flag.
        self.description = service['service_description']
        fieldname = "%s::%s::%s" % (
            service['host_name'], service['service_description'], 'register')
        self.fields[fieldname] = forms.BooleanField(
            required=False, initial=service['register'] == "1", label='register')
        self.register = fieldname
        macros = []
        self.command_line = None
        try:
            self.command_line = service.get_effective_command_line()
            for macro, value in service.get_all_macros().items():
                # Bugfix: was startswith('S$ARG'), which can never match —
                # macro names begin with '$' (e.g. $ARG1$, $_SERVICE_PORT$).
                if macro.startswith('$_SERVICE') or macro.startswith('$ARG'):
                    macros.append(macro)
            for k in sorted(macros):
                fieldname = "%s::%s::%s" % (
                    service['host_name'], service['service_description'], k)
                # Turn e.g. "$_SERVICE_HTTP_PORT$" into the label "Http port"
                label = k.replace('$_SERVICE', '')
                label = label.replace('_', ' ')
                label = label.replace('$', '')
                label = label.capitalize()
                self.fields[fieldname] = forms.CharField(
                    required=False, initial=service.get_macro(k), label=label)
        # KeyError can occur if service has an invalid check_command
        except KeyError:
            pass

    def save(self):
        """ Write each changed attribute back to the service and reload it. """
        for i in self.changed_data:
            # Changed data comes in the format host_name::service_description::$_SERVICE_PING
            # We need to change that to just __PING
            field_name = i.split('::')[2]
            field_name = field_name.replace('$_SERVICE', '_')
            field_name = field_name.replace('$', '')
            data = self.cleaned_data[i]
            if field_name == 'register':
                data = int(data)
            self.service[field_name] = data
        self.service.save()
        self.service.reload_object()
        # Lets also update commandline because form is being returned to the
        # user
        self.command_line = self.service.get_effective_command_line()
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import okconfig
import adagios.settings
okconfig.cfg_file = adagios.settings.nagios_config
class TestOkconfig(unittest.TestCase):
def testOkconfigVerifies(self):
result = okconfig.verify()
for k, v in result.items():
self.assertTrue(v, msg=_("Failed on test: %s") % k)
def testIndexPage(self):
c = Client()
response = c.get('/okconfig/verify_okconfig')
self.assertEqual(response.status_code, 200)
def testPageLoad(self):
""" Smoketest for the okconfig views """
self.loadPage('/okconfig/addhost')
self.loadPage('/okconfig/scan_network')
self.loadPage('/okconfig/addgroup')
self.loadPage('/okconfig/addtemplate')
self.loadPage('/okconfig/addhost')
self.loadPage('/okconfig/addservice')
self.loadPage('/okconfig/install_agent')
self.loadPage('/okconfig/edit')
self.loadPage('/okconfig/edit/localhost')
self.loadPage('/okconfig/verify_okconfig')
def loadPage(self, url):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(e)s") % {'url': url, 'e': e})
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
# URL routes for the okconfig wizard views; all views live in
# adagios.okconfig_.views (the 'adagios' prefix is prepended by patterns()).
urlpatterns = patterns('adagios',
                       #(r'^/?$', 'okconfig_.views.index'),
                       (r'^/scan_network/?', 'okconfig_.views.scan_network'),
                       (r'^/addgroup/?', 'okconfig_.views.addgroup'),
                       (r'^/addtemplate/?', 'okconfig_.views.addtemplate'),
                       (r'^/addhost/?', 'okconfig_.views.addhost'),
                       (r'^/addservice/?', 'okconfig_.views.addservice'),
                       (r'^/install_agent/?', 'okconfig_.views.install_agent'),
                       (r'^/edit/?$', 'okconfig_.views.choose_host'),
                       (r'^/edit/(?P<host_name>.+)$', 'okconfig_.views.edit'),
                       (r'^/verify_okconfig/?',
                        'okconfig_.views.verify_okconfig'),
                       )
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render_to_response, redirect
from django.core import serializers
from django.http import HttpResponse, HttpResponseServerError, HttpResponseRedirect
from django.utils import simplejson
from django.core.context_processors import csrf
from django.template import RequestContext
from django.utils.translation import ugettext as _
from adagios.views import adagios_decorator
from django.core.urlresolvers import reverse
from adagios.okconfig_ import forms
import okconfig
import okconfig.network_scan
from pynag import Model
@adagios_decorator
def addcomplete(request, c=None):
    """ Landing page when a new okconfig group has been added
    """
    context = c or {}
    return render_to_response('addcomplete.html', context, context_instance=RequestContext(request))
@adagios_decorator
def addgroup(request):
    """ Add a new okconfig group
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    # If there is a problem with the okconfig setup, lets display an error
    if not okconfig.is_valid():
        return verify_okconfig(request)
    if request.method == 'GET':
        # GET just renders an empty form (pre-filled from the querystring)
        f = forms.AddGroupForm(initial=request.GET)
    elif request.method == 'POST':
        f = forms.AddGroupForm(request.POST)
        if f.is_valid():
            group_name = f.cleaned_data['group_name']
            alias = f.cleaned_data['alias']
            force = f.cleaned_data['force']
            try:
                # okconfig.addgroup returns the list of files it wrote
                c['filelist'] = okconfig.addgroup(
                    group_name=group_name, alias=alias, force=force)
                c['group_name'] = group_name
                return addcomplete(request, c)
            except Exception, e:
                c['errors'].append(_("error adding group: %s") % e)
        else:
            c['errors'].append(_('Could not validate input'))
    else:
        raise Exception("Sorry i only support GET or POST")
    c['form'] = f
    return render_to_response('addgroup.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addhost(request):
    """ Add a new host from an okconfig template
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    # If there is a problem with the okconfig setup, lets display an error
    if not okconfig.is_valid():
        return verify_okconfig(request)
    if request.method == 'GET':
        # GET just renders an empty form (pre-filled from the querystring)
        f = forms.AddHostForm(initial=request.GET)
    elif request.method == 'POST':
        f = forms.AddHostForm(request.POST)
        if f.is_valid():
            host_name = f.cleaned_data['host_name']
            group_name = f.cleaned_data['group_name']
            address = f.cleaned_data['address']
            templates = f.cleaned_data['templates']
            #description = f.cleaned_data['description']
            force = f.cleaned_data['force']
            try:
                # okconfig.addhost returns the list of files it wrote
                c['filelist'] = okconfig.addhost(host_name=host_name, group_name=group_name, address=address,
                                                 force=force, templates=templates)
                c['host_name'] = host_name
                return addcomplete(request, c)
            except Exception, e:
                c['errors'].append(_("error adding host: %s") % e)
        else:
            c['errors'].append(_('Could not validate input'))
    else:
        raise Exception("Sorry i only support GET or POST")
    c['form'] = f
    return render_to_response('addhost.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addtemplate(request, host_name=None):
    """ Add a new okconfig template to a host
    """
    # NOTE: the host_name URL parameter is only a form pre-fill; on a valid
    # POST it is overwritten by the form's cleaned value below.
    c = {}
    c['messages'] = []
    c['errors'] = []
    # If there is a problem with the okconfig setup, lets display an error
    if not okconfig.is_valid():
        return verify_okconfig(request)
    c['form'] = forms.AddTemplateForm(initial=request.GET)
    if request.method == 'POST':
        c['form'] = f = forms.AddTemplateForm(request.POST)
        if f.is_valid():
            try:
                f.save()
                c['host_name'] = host_name = f.cleaned_data['host_name']
                # Files that the form's save() wrote to disk
                c['filelist'] = f.filelist
                c['messages'].append(
                    _("Template was successfully added to host."))
                return HttpResponseRedirect(reverse('adagios.okconfig_.views.edit', args=[host_name]))
            except Exception, e:
                c['errors'].append(e)
        else:
            c['errors'].append(_("Could not validate form"))
    return render_to_response('addtemplate.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addservice(request):
    """ Create a new service derived from an okconfig template
    """
    c = {}
    c.update(csrf(request))
    c['form'] = forms.AddServiceToHostForm()
    c['messages'] = []
    c['errors'] = []
    c['filename'] = Model.config.cfg_file
    if request.method == 'POST':
        c['form'] = form = forms.AddServiceToHostForm(data=request.POST)
        if form.is_valid():
            host_name = form.cleaned_data['host_name']
            host = Model.Host.objects.get_by_shortname(host_name)
            service = form.cleaned_data['service']
            # Build the new service: inherit from the chosen template and
            # store it in the same file as the host itself
            new_service = Model.Service()
            new_service.host_name = host_name
            new_service.use = service
            new_service.set_filename(host.get_filename())
            # new_service.reload_object()
            c['my_object'] = new_service
            # Add custom macros if any were specified
            for k, v in form.data.items():
                if k.startswith("_") or k.startswith('service_description'):
                    new_service[k] = v
            try:
                new_service.save()
                return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': new_service.get_id()}))
            except IOError, e:
                c['errors'].append(e)
        else:
            c['errors'].append(_("Could not validate form"))
    return render_to_response('addservice.html', c, context_instance=RequestContext(request))
@adagios_decorator
def verify_okconfig(request):
    """ Checks if okconfig is properly set up. """
    c = {}
    c['errors'] = []
    c['okconfig_checks'] = okconfig.verify()
    # One generic error message is enough, no matter how many checks failed
    if any(status == False for status in c['okconfig_checks'].values()):
        c['errors'].append(
            _('There seems to be a problem with your okconfig installation'))
    return render_to_response('verify_okconfig.html', c, context_instance=RequestContext(request))
@adagios_decorator
def install_agent(request):
    """ Installs an okagent on a remote host """
    c = {}
    c['errors'] = []
    c['messages'] = []
    c['form'] = forms.InstallAgentForm(initial=request.GET)
    c['nsclient_installfiles'] = okconfig.config.nsclient_installfiles
    if request.method == 'POST':
        c['form'] = f = forms.InstallAgentForm(request.POST)
        if f.is_valid():
            f.clean()
            host = f.cleaned_data['remote_host']
            user = f.cleaned_data['username']
            passw = f.cleaned_data['password']
            method = f.cleaned_data['install_method']
            domain = f.cleaned_data['windows_domain']
            try:
                status, out, err = okconfig.install_okagent(
                    remote_host=host, domain=domain, username=user, password=passw, install_method=method)
                c['exit_status'] = status
                c['stderr'] = err
                # Do a little cleanup in winexe stdout, it is irrelevant
                out = out.split('\n')
                c['stdout'] = []
                for i in out:
                    # Drop known-noise lines; translate known NT error codes
                    # into actionable hints for the user.
                    if i.startswith(_('Unknown parameter encountered:')):
                        continue
                    elif i.startswith(_('Ignoring unknown parameter')):
                        continue
                    elif 'NT_STATUS_LOGON_FAILURE' in i:
                        c['hint'] = _("NT_STATUS_LOGON_FAILURE usually means there is a problem with username or password. Are you using correct domain ?")
                    elif 'NT_STATUS_DUPLICATE_NAME' in i:
                        c['hint'] = _("The security settings on the remote windows host might forbid logins if the host name specified does not match the computername on the server. Try again with either correct hostname or the ip address of the server.")
                    elif 'NT_STATUS_ACCESS_DENIED' in i:
                        c['hint'] = _("Please make sure that %(admin)s is a local administrator on host %(host)s") % {
                            'admin': user, 'host': host}
                    elif i.startswith('Error: Directory') and i.endswith('not found'):
                        c['hint'] = _("No nsclient copy found ")
                    c['stdout'].append(i)
                c['stdout'] = '\n'.join(c['stdout'])
            except Exception, e:
                c['errors'].append(e)
        else:
            c['errors'].append(_('invalid input'))
    return render_to_response('install_agent.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit(request, host_name):
    """ Edit all the Service "__MACROS" for a given host """
    c = {}
    c['errors'] = []
    c['messages'] = []
    c.update(csrf(request))
    c['hostname'] = host_name
    c['host_name'] = host_name
    c['forms'] = myforms = []
    try:
        c['myhost'] = Model.Host.objects.get_by_shortname(host_name)
    except KeyError, e:
        # Unknown host: render the same template with just the error
        c['errors'].append(_("Host %s not found") % e)
        return render_to_response('edittemplate.html', c, context_instance=RequestContext(request))
    # Get all services of that host that contain a service_description
    services = Model.Service.objects.filter(
        host_name=host_name, service_description__contains='')
    if request.method == 'GET':
        # One EditTemplateForm per service, unbound
        for service in services:
            myforms.append(forms.EditTemplateForm(service=service))
    elif request.method == 'POST':
        # All the form fields have an id of HOST::SERVICE::ATTRIBUTE
        for service in services:
            form = forms.EditTemplateForm(service=service, data=request.POST)
            myforms.append(form)
            if form.is_valid():
                try:
                    # Only save forms that actually changed something
                    if form.changed_data != []:
                        form.save()
                        c['messages'].append(
                            _("'%s' successfully saved.") % service.get_description())
                except Exception, e:
                    c['errors'].append(
                        _("Failed to save service %(service)s: %(exc)s") % {'service': service.get_description(), 'exc': e})
            else:
                c['errors'].append(
                    _('invalid data in %s') % service.get_description())
    c['forms'] = myforms
    return render_to_response('edittemplate.html', c, context_instance=RequestContext(request))
@adagios_decorator
def choose_host(request):
    """Simple form that lets you choose one host to edit"""
    c = {}
    c.update(csrf(request))
    if request.method == 'POST':
        form = forms.ChooseHostForm(data=request.POST)
        c['form'] = form
        if form.is_valid():
            chosen_host = form.cleaned_data['host_name']
            return HttpResponseRedirect(reverse("adagios.okconfig_.views.edit", args=[chosen_host]))
    elif request.method == 'GET':
        c['form'] = forms.ChooseHostForm(initial=request.GET)
    return render_to_response('choosehost.html', c, context_instance=RequestContext(request))
@adagios_decorator
def scan_network(request):
""" Scan a single network and show hosts that are alive
"""
c = {}
c['errors'] = []
if not okconfig.is_valid():
return verify_okconfig(request)
if request.method == 'GET':
if request.GET.has_key('network_address'):
initial = request.GET
else:
my_ip = okconfig.network_scan.get_my_ip_address()
network_address = "%s/28" % my_ip
initial = {'network_address': network_address}
c['form'] = forms.ScanNetworkForm(initial=initial)
elif request.method == 'POST':
c['form'] = forms.ScanNetworkForm(request.POST)
if not c['form'].is_valid():
c['errors'].append(_("could not validate form"))
else:
network = c['form'].cleaned_data['network_address']
try:
c['scan_results'] = okconfig.network_scan.get_all_hosts(
network)
for i in c['scan_results']:
i.check()
except Exception, e:
c['errors'].append(_("Error running scan"))
return render_to_response('scan_network.html', c, context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
class LiveStatusForm(forms.Form):

    """ This form is used to generate a mk_livestatus query """
    # Livestatus table to query (e.g. hosts, services).  Choices are
    # expected to be populated by the view before the form is rendered.
    table = forms.ChoiceField()
    # Columns of the chosen table to include in the query result.
    columns = forms.MultipleChoiceField()
    # Optional livestatus "Filter:" headers; blank means no filter.
    filter1 = forms.ChoiceField(required=False)
    filter2 = forms.ChoiceField(required=False)
########NEW FILE########
__FILENAME__ = functions
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import pynag.Utils
from pynag.Utils import PynagError
from adagios import settings
import subprocess
from django.utils.translation import ugettext as _
def run_pnp(pnp_command, **kwargs):
    """ Run a specific pnp command

    Arguments:
      pnp_command -- examples: image graph json xml export
      host   -- filter results for a specific host
      srv    -- filter results for a specific service
      source -- Fetch a specific datasource (0,1,2,3, etc)
      view   -- Specific timeframe (0 = 4 hours, 1 = 25 hours, etc)
    Returns:
      Results (stdout) as they appear from pnp's index.php
    Raises:
      PynagError if command could not be run
    """
    # Only fall back to auto-detection when the setting is absent; the old
    # code caught bare Exception here, which could mask real errors.
    try:
        pnp_path = settings.pnp_path
    except AttributeError:
        pnp_path = find_pnp_path()
    # Cleanup kwargs: flatten list values (e.g. from a django QueryDict)
    # and coerce everything to str so the querystring join below is safe.
    pnp_arguments = {}
    for k, v in kwargs.items():
        k = str(k)
        if isinstance(v, list):
            v = v[0]
        v = str(v)
        pnp_arguments[k] = v
    querystring = '&'.join(map(lambda x: "%s=%s" % x, pnp_arguments.items()))
    pnp_parameters = pnp_command + "?" + querystring
    command = ['php', pnp_path, pnp_parameters]
    # shell=False with an argument list avoids shell injection via kwargs.
    proc = subprocess.Popen(command, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,)
    stdout, stderr = proc.communicate('through stdin to stdout')
    # Only stdout is returned; stderr and the exit code are discarded.
    return stdout
def find_pnp_path():
    """ Look through common locations of pnp4nagios, tries to locate it automatically """
    candidates = [settings.pnp_filepath]
    candidates += [
        "/usr/share/pnp4nagios/html/index.php",
        "/usr/share/nagios/html/pnp4nagios/index.php"
    ]
    # First existing candidate wins; the configured path is tried first.
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
    raise PynagError(
        _("Could not find pnp4nagios/index.php. Please specify it in adagios->settings->PNP. Tried %s") % candidates)
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import pynag.Parsers
from adagios.settings import nagios_config
from adagios.pnp import functions
class PNP4NagiosTestCase(unittest.TestCase):

    """ Sanity checks for the pnp4nagios integration. """

    def testPnpIsConfigured(self):
        """ Verify nagios.cfg loads the npcd broker module and that the
        module binary and its config_file= argument point at real files. """
        config = pynag.Parsers.config()
        config.parse_maincfg()
        for k, v in config.maincfg_values:
            # NOTE(review): find() > 1 would miss 'npcd' at string offsets
            # 0-1; presumably safe because values start with an absolute
            # path -- confirm.
            if k == "broker_module" and v.find('npcd') > 1:
                tmp = v.split()
                self.assertFalse(
                    len(tmp) < 2, _('We think pnp4nagios broker module is incorrectly configured. In nagios.cfg it looks like this: %s') % v)
                module_file = tmp.pop(0)
                self.assertTrue(
                    os.path.exists(module_file), _('npcd broker_module module not found at "%s". Is nagios correctly configured?') % module_file)
                config_file = None
                # Search the remaining broker_module arguments for
                # config_file=<path>.
                for i in tmp:
                    if i.startswith('config_file='):
                        config_file = i.split('=', 1)[1]
                        break
                self.assertIsNotNone(
                    config_file, _("npcd broker module has no config_file= argument. Is pnp4nagios correctly configured?"))
                self.assertTrue(
                    os.path.exists(config_file), _('PNP4nagios config file was not found (%s).') % config_file)
                return
        # No broker_module line mentioning npcd was found at all.
        self.assertTrue(
            False, _('Nagios Broker module not found. Is pnp4nagios installed and configured?'))

    def testGetJson(self):
        """ run_pnp('json') should return output from pnp's index.php. """
        result = functions.run_pnp('json')
        self.assertGreaterEqual(
            len(result), 0, msg=_("Tried to get json from pnp4nagios but result was improper"))

    def testPageLoad(self):
        """ Smoke test: the /pnp/json proxy view should answer HTTP 200. """
        c = Client()
        response = c.get('/pnp/json')
        self.assertEqual(response.status_code, 200)
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
# Everything after /pnp/ is handed to the pnp proxy view as `pnp_command`.
urlpatterns = patterns(
    'adagios',
    (r'^/(?P<pnp_command>.+)?$', 'pnp.views.pnp'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.context_processors import csrf
from django.shortcuts import render_to_response
from django.shortcuts import HttpResponse
from adagios.pnp.functions import run_pnp
from adagios.views import adagios_decorator
import json
@adagios_decorator
def pnp(request, pnp_command='image'):
    """ Proxy view: run a pnp4nagios command and return its raw output.

    The response content type depends on the command: png for images,
    json for json queries, "text" for anything else.
    """
    c = {'messages': [], 'errors': []}
    content_types = {'image': 'image/png', 'json': 'application/json'}
    result = run_pnp(pnp_command, **request.GET)
    return HttpResponse(result, content_types.get(pnp_command, 'text'))
########NEW FILE########
__FILENAME__ = profiling
#!/usr/bin/env python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Tomas Edwardsson <tommi@tommi.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Code from https://code.djangoproject.com/wiki/ProfilingDjango
# Documentation at
# https://github.com/opinkerfi/adagios/wiki/Profiling-Decorators-within-Adagios
import hotshot
import os
import time
import settings
import tempfile
import random
# Directory where profile dumps are written.  Configurable via
# settings.PROFILE_LOG_BASE, falling back to the system temp directory.
try:
    PROFILE_LOG_BASE = settings.PROFILE_LOG_BASE
except AttributeError:
    # Narrowed from a bare `except:` -- only a missing setting should be
    # silently tolerated here; other errors must propagate.
    PROFILE_LOG_BASE = tempfile.gettempdir()
def profile(log_file):
    """Profile some callable.

    This decorator uses the hotshot profiler to profile some callable (like
    a view function or method) and dumps the profile data somewhere sensible
    for later processing and examination.

    It takes one argument, the profile log name. If it's a relative path, it
    places it under the PROFILE_LOG_BASE. It also inserts a UTC time stamp
    (plus a random suffix) into the file name so multiple trials can be run
    and compared.
    """
    if not os.path.isabs(log_file):
        log_file = os.path.join(PROFILE_LOG_BASE, log_file)

    def _outer(func):
        def _inner(*args, **kwargs):
            # Compute the timestamped file name at call time so every
            # invocation gets its own profile dump.
            stem, extension = os.path.splitext(log_file)
            stamp = time.strftime("%Y%m%dT%H%M%S", time.gmtime())
            final_log_file = "%s-%s%s%s" % (stem, stamp, random.randint(1, 9999), extension)
            prof = hotshot.Profile(final_log_file)
            try:
                ret = prof.runcall(func, *args, **kwargs)
            finally:
                # Always close the profile file, even if the callable raised.
                prof.close()
            return ret
        return _inner
    return _outer
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = objectbrowser
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Temporary wrapper around pynag helpers script
from adagios.misc.helpers import *
########NEW FILE########
__FILENAME__ = status
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This is a wrapper around the rest functionality that exists in
# The status view. We like to keep the actual implementations there
# because we like to keep code close to its apps
from adagios.status.rest import *
########NEW FILE########
__FILENAME__ = tests
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import json
class LiveStatusTestCase(unittest.TestCase):
def testPageLoad(self):
""" Smoke Test for various rest modules """
self.loadPage('/rest')
self.loadPage('/rest/status/')
self.loadPage('/rest/pynag/')
self.loadPage('/rest/adagios/')
self.loadPage('/rest/status.js')
self.loadPage('/rest/pynag.js')
self.loadPage('/rest/adagios.js')
def testDnsLookup(self):
""" Test the DNS lookup rest call
"""
path = "/rest/pynag/json/dnslookup"
data = {'host_name': 'localhost'}
try:
c = Client()
response = c.post(path=path, data=data)
json_data = json.loads(response.content)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % path)
self.assertEqual(True, 'addresslist' in json_data, _("Expected 'addresslist' to appear in response"))
except KeyError, e:
self.assertEqual(True, _("Unhandled exception while loading %(path)s: %(exc)s") % {'path': path, 'exc': e})
def loadPage(self, url):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(exc)s") % {'url': url, 'exc': e})
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
    url(r'^/?$', 'rest.views.list_modules'),
)

# rest_modules maps an exposed module name to the python path whose public
# functions and variables become available under /rest/<name>/.
# Example:
#   rest_modules['module_name'] = 'module_path'
# makes /adagios/rest/module_name/ available and loads all functions
# from 'module_path'.
rest_modules = {}
rest_modules['pynag'] = 'adagios.misc.helpers'
rest_modules['okconfig'] = 'okconfig'
rest_modules['status'] = 'adagios.rest.status'
rest_modules['adagios'] = 'adagios.misc.rest'

# For each module generate three url patterns.  For the status module the
# end result is equivalent to:
#   url(r'^/status/$', 'rest.views.index', { ... }, name="rest/status"),
#   url(r'^/status.js$', 'rest.views.javascript', { ... }),
#   (r'^/status/(?P<format>.+?)/(?P<attribute>.+?)/?$', 'rest.views.handle_request', { ... }),
for module_name, module_path in rest_modules.items():
    prefix = r'^/%s' % module_name
    view_kwargs = {'module_name': module_name, 'module_path': module_path}
    urlpatterns += patterns(
        'adagios',
        url(prefix + '/$', 'rest.views.index', view_kwargs, name="rest/%s" % module_name),
        url(prefix + '.js$', 'rest.views.javascript', view_kwargs, ),
        url(prefix + '/(?P<format>.+?)/(?P<attribute>.+?)/?$', 'rest.views.handle_request', view_kwargs),
    )
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Create your views here.
from django.shortcuts import render_to_response, redirect
from django.core import serializers
from django.http import HttpResponse, HttpResponseServerError
from django.utils import simplejson
#from django.core.context_processors import csrf
from django.views.decorators.csrf import csrf_exempt
from django.template import RequestContext
from django.core.urlresolvers import resolve
from adagios.views import adagios_decorator
import inspect
from django import forms
import os
my_module = None
import adagios.rest.urls
def _load(module_path):
#global my_module
# if not my_module:
my_module = __import__(module_path, None, None, [''])
return my_module
@csrf_exempt
@adagios_decorator
def handle_request(request, module_name, module_path, attribute, format):
    """ Generic dispatcher for the REST interface.

    Arguments:
      module_name -- short name the module is exposed under (e.g. "status")
      module_path -- python path of the module (e.g. "adagios.rest.status")
      attribute   -- name of the function or variable inside that module
      format      -- one of: help, json, xml, txt

    A GET without parameters renders an input form for the function;
    a GET/POST with parameters calls the function and returns its result
    serialized in the requested format.
    """
    # Bug fix: ugettext was never imported in this module, so the error
    # paths below raised NameError instead of the intended message.
    from django.utils.translation import ugettext as _
    m = _load(module_path)
    # TODO: Only allow function calls if method == POST
    members = {}
    for k, v in inspect.getmembers(m):
        members[k] = v
    item = members[attribute]
    docstring = inspect.getdoc(item)
    if request.method == 'GET':
        if format == 'help':
            result = inspect.getdoc(item)
        elif not inspect.isfunction(item):
            # Plain module variable: return its value as-is.
            result = item
        else:
            arguments = request.GET
            c = {}
            c['function_name'] = attribute
            c['form'] = CallFunctionForm(function=item, initial=request.GET)
            c['docstring'] = docstring
            c['module_name'] = module_name
            if not request.GET.items():
                # No parameters provided: show an html form for the function.
                return render_to_response('function_form.html', c, context_instance=RequestContext(request))
            # Handle get parameters
            arguments = {}
            for k, v in request.GET.items():
                # TODO: Is it safe to turn all digits to int ?
                #if str(v).isdigit(): v = int(float(v))
                arguments[k.encode('utf-8')] = v.encode('utf-8')
            # Here is a special hack, if the method we are calling has an argument
            # called "request" we will not let the remote user ship it in.
            # instead we give it a django request object
            if 'request' in inspect.getargspec(item)[0]:
                arguments['request'] = request
            result = item(**arguments)
    elif request.method == 'POST':
        item = members[attribute]
        if not inspect.isfunction(item):
            result = item
        else:
            arguments = {}  # request.POST.items()
            for k, v in request.POST.items():
                arguments[k.encode('utf-8')] = v.encode('utf-8')
            # Same special-casing of 'request' as in the GET branch above.
            if 'request' in inspect.getargspec(item)[0]:
                arguments['request'] = request
            result = item(**arguments)
    else:
        raise BaseException(_("Unsupported operation: %s") % (request.method, ))
    # Everything below is just about formatting the results
    if format == 'json':
        result = simplejson.dumps(
            result, ensure_ascii=False, sort_keys=True, skipkeys=True, indent=4)
        mimetype = 'application/javascript'
    elif format == 'xml':
        # TODO: For some reason Ubuntu does not have this module. Where is
        # it? Should we use lxml instead ?
        import xml.marshal.generic
        result = xml.marshal.generic.dumps(result)
        mimetype = 'application/xml'
    elif format == 'txt':
        result = str(result)
        mimetype = 'text/plain'
    else:
        raise BaseException(
            _("Unsupported format: '%s'. Valid formats: json xml txt") %
            format)
    return HttpResponse(result, mimetype=mimetype)
@adagios_decorator
def list_modules(request):
    """ List all available modules and their basic info """
    # Equivalent to the old locals() trick: the template only needs
    # rest_modules (and the request, supplied by context processors too).
    context = {
        'request': request,
        'rest_modules': adagios.rest.urls.rest_modules,
    }
    return render_to_response('list_modules.html', context, context_instance=RequestContext(request))
@adagios_decorator
def index(request, module_name, module_path):
    """ This view is used to display the contents of a given python module
    """
    module = _load(module_path)
    blacklist = ('argv', 'environ', 'exit', 'path', 'putenv', 'getenv', )
    gets = []
    puts = []
    # Categorize the module's public members: functions are callable via
    # POST ("puts"), everything else is readable via GET ("gets").
    for name, value in inspect.getmembers(module):
        if name.startswith('_') or name in blacklist or inspect.ismodule(value):
            continue
        if inspect.isfunction(value):
            puts.append(name)
        else:
            gets.append(name)
    context = {
        'module_path': module_path,
        'gets': gets,
        'puts': puts,
        'module_documenation': inspect.getdoc(module),
    }
    return render_to_response('index.html', context, context_instance=RequestContext(request))
def javascript(request, module_name, module_path):
    """ Create a javascript library that will wrap around module_path module

    Renders javascript.html with one entry per public function of the
    module, so each function gets a javascript wrapper that posts to the
    corresponding /rest/<module>/json/<function> url.
    """
    m = _load(module_path)
    variables, functions = [], []
    blacklist = ('argv', 'environ', 'exit', 'path', 'putenv', 'getenv', )
    members = {}
    # Categorize public members: functions get wrappers, other values are
    # listed as readable variables.
    for k, v in inspect.getmembers(m):
        if k.startswith('_'):
            continue
        if k in blacklist:
            continue
        if inspect.ismodule(v):
            continue
        if inspect.isfunction(v):
            functions.append(k)
            members[k] = v
        else:
            variables.append(k)
    c = {}
    c['module_path'] = module_path
    c['module_name'] = module_name
    c['gets'] = variables
    c['puts'] = functions
    c['module_documenation'] = inspect.getdoc(m)
    current_url = request.get_full_path()
    # The wrappers post to the same url minus the ".js" suffix.
    baseurl = current_url.replace('.js', '')
    # Find every function, prepare what is needed so template can
    for i in functions:
        argspec = inspect.getargspec(members[i])
        args, varargs, varkw, defaults = argspec
        docstring = inspect.getdoc(members[i])
        if defaults is None:
            defaults = []
        else:
            defaults = list(defaults)
        # Lets create argstring, for the javascript needed
        # Defaults align with the tail of the argument list, so pop from
        # the end of both lists to pair each default with its parameter.
        tmp = [] + args
        argstring = []
        for num, default in enumerate(reversed(defaults)):
            argstring.append('%s=%s' % (tmp.pop(), default))
        argstring.reverse()
        argstring = tmp + argstring
        # Replace the function object with a plain dict of metadata the
        # template can render.
        members[i] = {}
        members[i]['args'] = args
        members[i]['argstring'] = ','.join(args)
        members[i]['varargs'] = varargs
        members[i]['varkw'] = varkw
        members[i]['defaults'] = defaults
        members[i]['docstring'] = docstring
        members[i]['url'] = baseurl + "/json/" + i
        args, varargs, varkw, defaults = argspec
    c['functions'] = members
    return render_to_response('javascript.html', c, mimetype="text/javascript", context_instance=RequestContext(request))
class CallFunctionForm(forms.Form):

    """ Dynamically built form used to call a REST-exposed function:
    one CharField is generated per function parameter. """

    def __init__(self, function, *args, **kwargs):
        """ Build fields from `function`'s signature and any initial values.

        Arguments:
          function -- the python callable whose arguments define the fields
        """
        super(CallFunctionForm, self).__init__(*args, **kwargs)
        # We will create a field for every function_paramater
        function_paramaters = {}
        # If any paramaters were past via querystring, lets generate fields for
        # them
        if kwargs.has_key('initial'):
            for k, v in kwargs['initial'].items():
                function_paramaters[k] = v
        # Generate fields which resemble our functions default arguments
        argspec = inspect.getargspec(function)
        args, varargs, varkw, defaults = argspec
        # varkw set means the function accepts **kwargs, so the template can
        # offer free-form extra parameters.
        self.show_kwargs = varkw is not None
        # We treat the argument 'request' as special. Django request object is going to be
        # passed instead of whatever the user wanted
        if "request" in args:
            args.remove('request')
        if defaults is None:
            defaults = []
        else:
            defaults = list(defaults)
        for i in args:
            self.fields[i] = forms.CharField(label=i)
        for k, v in function_paramaters.items():
            self.fields[k] = forms.CharField(label=k, initial=v)
        # Defaults align with the *tail* of the argument list; pop from the
        # end of both lists to attach each default to its parameter's field.
        while len(defaults) > 0:
            value = defaults.pop()
            field = args.pop()
            self.fields[field].initial = value
########NEW FILE########
__FILENAME__ = settings
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Django settings for adagios project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
USE_TZ = True
# Hack to allow relative template paths
import os
from glob import glob
from warnings import warn
import string
djangopath = os.path.dirname(__file__)
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/test',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
# TIME_ZONE = 'Atlantic/Reykjavik'
TIME_ZONE = None
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = "%s/media/" % (djangopath)
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = 'media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'adagios.auth.AuthorizationMiddleWare',
#'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.messages.middleware.MessageMiddleware',
)
SESSION_ENGINE = 'django.contrib.sessions.backends.file'
LANGUAGES = (
('en', 'English'),
('fr', 'French'),
)
LOCALE_PATHS = (
"%s/locale/" % (djangopath),
)
ROOT_URLCONF = 'adagios.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
"%s/templates" % (djangopath),
)
INSTALLED_APPS = [
#'django.contrib.auth',
#'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
#'django.contrib.messages',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'adagios.objectbrowser',
'adagios.rest',
'adagios.misc',
'adagios.pnp',
'adagios.contrib',
]
TEMPLATE_CONTEXT_PROCESSORS = ('adagios.context_processors.on_page_load',
#"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages")
# Themes options #
# To rapidly switch your theme, update THEME_DEFAULT and leave the rest.
# folders in which themes files will be looked up
THEMES_FOLDER = 'themes' # in 'media/'
# default theme in use, it should be present in the THEMES_FOLDER
# (or at least through a symbolic link)
THEME_DEFAULT = 'default'
# CSS entry-point, in the theme folder
THEME_ENTRY_POINT = 'style.css'
# folder where users preferences are stored
USER_PREFS_PATH = "/etc/adagios/userdata/"
# name displayed in the top left corner
TOPMENU_HOME = 'Adagios'
# items in the top menubar (excluding those coming from %s_menubar.html)
# The identfier is used to recognize active links (which are displayed
# differently).
# The view can begin with '/' (and will go to http://server/...)
# or can be a view name.
# See Nagvis example for direct link, though the template contrib/nagvis.html must be created.
TOPMENU_ITEMS = [
# Name, identifier, view_url, icon
# ('Nagvis', 'nagvis', '/contrib/nagvis.html', 'glyph-display'),
('Configure', 'objectbrowser', 'objectbrowser.views.list_object_types', 'glyph-edit'),
('Nagios', 'nagios', 'misc.views.nagios', 'glyph-list'),
]
# Graphite #
# the url where to fetch data and images
graphite_url = "http://localhost:9091"
# time ranges for generated graphs
# the CSS identifier only needs to be unique here (it will be prefixed)
GRAPHITE_PERIODS = [
# Displayed name, CSS identifier, Graphite period
('4 hours', 'hours', '-4h'),
('One day', 'day', '-1d'),
('One week', 'week', '-1w'),
('One month', 'month', '-1mon'),
('One year', 'year', '-1y'),
]
# querystring that will be passed on to graphite's render method.
graphite_querystring = "target={host_}.{service_}.{metric_}&width=500&height=200&from={from_}d&lineMode=connected&title={title}&target={host_}.{service_}.{metric_}_warn&target={host_}.{service_}.{metric_}_crit"
# Title format to use on all graphite graphs
graphite_title = "{host} - {service} - {metric}"
# default selected (active) tab, and the one rendered in General-preview
GRAPHITE_DEFAULT_TAB = 'day'
# Adagios specific configuration options. These are just the defaults,
# Anything put in /etc/adagios.d/adagios.conf will overwrite this.
nagios_config = None # Sensible default is "/etc/nagios/nagios.cfg"
nagios_url = "/nagios"
nagios_init_script = "/etc/init.d/nagios"
nagios_binary = "/usr/bin/nagios"
livestatus_path = None
enable_githandler = False
enable_loghandler = False
enable_authorization = False
enable_status_view = True
enable_bi = True
enable_graphite = False
contrib_dir = "/var/lib/adagios/contrib/"
serverside_includes = "/etc/adagios/ssi"
escape_html_tags = True
warn_if_selinux_is_active = True
destination_directory = "/etc/nagios/adagios/"
administrators = "nagiosadmin,@users"
pnp_url = "/pnp4nagios"
pnp_filepath = "/usr/share/nagios/html/pnp4nagios/index.php"
include = ""
django_secret_key = ""
map_center = "64.119595,-21.655426"
map_zoom = "10"
title_prefix = "Adagios - "
auto_reload = False
refresh_rate = "30"
plugins = {}
# Profiling settings
#
# You can use the @profile("filename") to profile single functions within
# adagios. Not enabled by default on any function.
#
# Documenations at
# https://github.com/opinkerfi/adagios/wiki/Profiling-Decorators-within-Adagios
PROFILE_LOG_BASE = "/var/lib/adagios"
# Load config files from /etc/adagios
# Adagios uses the configuration file in /etc/adagios/adagios.conf by default.
# If it doesn't exist you should create it. Otherwise a adagios.conf will be
# created in the django project root which should be avoided.
adagios_configfile = "/etc/adagios/adagios.conf"
# Load the main config file, creating a fallback next to the django project
# when /etc/adagios/adagios.conf does not exist.
try:
    if not os.path.exists(adagios_configfile):
        alternative_adagios_configfile = "%s/adagios.conf" % djangopath
        message = "Config file '{adagios_configfile}' not found. Using {alternative_adagios_configfile} instead."
        warn(message.format(**locals()))
        adagios_configfile = alternative_adagios_configfile
        # Touch the file so later appends (e.g. the secret key) succeed.
        open(adagios_configfile, "a").close()
    # The config file is plain python executed into this module's
    # namespace, overriding the defaults defined above.
    execfile(adagios_configfile)
    # if config has any default include, lets include that as well
    configfiles = glob(include)
    for configfile in configfiles:
        execfile(configfile)
except IOError, e:
    warn('Unable to open %s: %s' % (adagios_configfile, e.strerror))
try:
    from django.utils.crypto import get_random_string
except ImportError:
    # Fallback for django versions without django.utils.crypto.
    def get_random_string(length, stringset=string.ascii_letters + string.digits + string.punctuation):
        '''
        Returns a string with `length` characters chosen from `stringset`

        >>> len(get_random_string(20)) == 20
        '''
        # Bug fix: the old `ord(x) for x in os.urandom(length)` breaks on
        # python 3, where iterating bytes yields ints; bytearray yields
        # ints on both python 2 and 3 and is identical on python 2.
        # NOTE: `byte % len(stringset)` is slightly modulo-biased; adequate
        # here but not uniformly random.
        random_bytes = bytearray(os.urandom(length))
        return ''.join([stringset[b % len(stringset)] for b in random_bytes])
if not django_secret_key:
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
SECRET_KEY = get_random_string(50, chars)
try:
data = "\n# Automaticly generated secret_key\ndjango_secret_key = '%s'\n" % SECRET_KEY
with open(adagios_configfile, "a") as config_fh:
config_fh.write(data)
except Exception, e:
warn("ERROR: Got %s while trying to save django secret_key in %s" % (type(e), adagios_configfile))
else:
SECRET_KEY = django_secret_key
# Django setting: only allow server-side includes from this directory.
ALLOWED_INCLUDE_ROOTS = (serverside_includes,)

# Register built-in adagios apps as plugins, driven by the enable_* toggles,
# then add every plugin module to django's INSTALLED_APPS.
if enable_status_view:
    plugins['status'] = 'adagios.status'
if enable_bi:
    plugins['bi'] = 'adagios.bi'
for k, v in plugins.items():
    INSTALLED_APPS.append(v)

import adagios.profiling

# default preferences, for new users or when they are not available
PREFS_DEFAULT = {
    'language': 'en',
    'theme': THEME_DEFAULT,
    'refresh_rate': refresh_rate
}
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.utils.translation import ugettext as _
import adagios.status.utils
import adagios.businessprocess
class LiveStatusForm(forms.Form):
    """ This form is used to generate a mk_livestatus query """
    # NOTE: django renders fields in declaration order; keep this order.
    # Choices for these fields are presumably populated at runtime -- none
    # are declared here (TODO confirm against the view that uses this form).
    table = forms.ChoiceField()  # which livestatus table to query
    columns = forms.MultipleChoiceField()  # which columns to return
    filter1 = forms.ChoiceField(required=False)  # optional filter expression
    filter2 = forms.ChoiceField(required=False)  # optional second filter
class RemoveSubProcessForm(forms.Form):
    """ Detach one specific sub process from a business process. """
    process_name = forms.CharField(max_length=100, required=True)
    process_type = forms.CharField(max_length=100, required=True)

    def __init__(self, instance, *args, **kwargs):
        # instance is the business process being edited
        self.bp = instance
        super(RemoveSubProcessForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Remove the selected sub process and persist the parent process. """
        data = self.cleaned_data
        self.bp.remove_process(data.get('process_name'), data.get('process_type'))
        self.bp.save()
# One (value, label) pair per supported status calculation method.
status_method_choices = [
    (method, method)
    for method in adagios.businessprocess.BusinessProcess.status_calculation_methods]
class BusinessProcessForm(forms.Form):
    """ Use this form to edit a BusinessProcess """
    name = forms.CharField(max_length=100, required=True,
                           help_text=_("Unique name for this business process."))
    #processes = forms.CharField(max_length=100, required=False)
    display_name = forms.CharField(max_length=100, required=False,
                                   help_text=_("This is the name that will be displayed to users on this process. Usually it is the name of the system this business group represents."))
    notes = forms.CharField(max_length=1000, required=False,
                            help_text=_("Here you can put in any description of the business process you are adding. Its a good idea to write down what the business process is about and who to contact in case of downtimes."))
    status_method = forms.ChoiceField(
        choices=status_method_choices, help_text=_("Here you can choose which method is used to calculate the global status of this business process"))
    # Human friendly labels for the four nagios-style states (0..3):
    state_0 = forms.CharField(max_length=100, required=False,
                              help_text=_("Human friendly text for this respective state. You can type whatever you want but nagios style exit codes indicate that 0 should be 'ok'"))
    state_1 = forms.CharField(max_length=100, required=False,
                              help_text=_("Typically used to represent warning or performance problems"))
    state_2 = forms.CharField(max_length=100, required=False,
                              help_text=_("Typically used to represent critical status"))
    state_3 = forms.CharField(
        max_length=100, required=False, help_text=_("Use this when status is unknown"))
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)

    def __init__(self, instance, *args, **kwargs):
        # instance: the BusinessProcess object this form edits
        self.bp = instance
        super(BusinessProcessForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Copy all cleaned form fields into the process and persist it. """
        c = self.cleaned_data
        self.bp.data.update(c)
        self.bp.save()

    def remove(self):
        """ Remove the sub process named in the raw form data, then persist. """
        c = self.data
        process_name = c.get('process_name')
        process_type = c.get('process_type')
        if process_type == 'None':
            # Treat the literal string 'None' as "no specific type"
            process_type = None
        self.bp.remove_process(process_name, process_type)
        self.bp.save()

    def clean(self):
        cleaned_data = super(BusinessProcessForm, self).clean()
        # If name has changed, look if there is another business process with
        # same name.
        new_name = cleaned_data.get('name')
        if new_name and new_name != self.bp.name:
            if new_name in adagios.businessprocess.get_all_process_names():
                raise forms.ValidationError(
                    _("Cannot rename process to %s. Another process with that name already exists") % new_name
                )
        return cleaned_data

    def delete(self):
        """ Delete this business process """
        self.bp.delete()

    def add_process(self):
        """ Add one sub process of whichever kind was submitted (process,
        hostgroup, servicegroup or service), then persist. """
        process_name = self.data.get('process_name')
        hostgroup_name = self.data.get('hostgroup_name')
        servicegroup_name = self.data.get('servicegroup_name')
        service_name = self.data.get('service_name')
        if process_name:
            self.bp.add_process(process_name, None)
        if hostgroup_name:
            self.bp.add_process(hostgroup_name, None)
        if servicegroup_name:
            self.bp.add_process(servicegroup_name, None)
        if service_name:
            self.bp.add_process(service_name, None)
        self.bp.save()
# Valid kinds of sub process, exposed as (value, label) pairs for forms.
choices = ('businessprocess', 'hostgroup', 'servicegroup', 'service', 'host')
process_type_choices = [(choice, choice) for choice in choices]
class AddSubProcess(forms.Form):
    """ Attach a new sub process to an existing business process. """
    process_type = forms.ChoiceField(choices=process_type_choices)
    process_name = forms.CharField(
        widget=forms.HiddenInput(attrs={'style': "width: 300px;"}), max_length=100)
    display_name = forms.CharField(max_length=100, required=False)
    tags = forms.CharField(
        max_length=100, required=False, initial="not critical")

    def __init__(self, instance, *args, **kwargs):
        # Bind the business process being edited before normal form init.
        self.bp = instance
        super(AddSubProcess, self).__init__(*args, **kwargs)

    def save(self):
        """ Add the sub process described by the cleaned data, then persist. """
        sub_process_kwargs = dict(self.cleaned_data)
        self.bp.add_process(**sub_process_kwargs)
        self.bp.save()
class AddHostgroupForm(forms.Form):
    # Placeholder form: declares no fields of its own.
    pass
class AddGraphForm(forms.Form):
    """ Attach a PNP graph (host/service/metric) to a business process. """
    host_name = forms.CharField(max_length=100,)
    service_description = forms.CharField(max_length=100, required=False)
    metric_name = forms.CharField(max_length=100, required=True)
    notes = forms.CharField(max_length=100, required=False,
                            help_text=_("Put here a friendly description of the graph"))

    def __init__(self, instance, *args, **kwargs):
        # instance is the business process the graph will be attached to
        self.bp = instance
        super(AddGraphForm, self).__init__(*args, **kwargs)

    def save(self):
        """ Register the graph on the business process and persist it. """
        graph_kwargs = dict(self.cleaned_data)
        self.bp.add_pnp_graph(**graph_kwargs)
        self.bp.save()
########NEW FILE########
__FILENAME__ = graphite
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Matthieu Caneill <matthieu.caneill@savoirfairelinux.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import adagios.settings
# Anything outside [A-Za-z0-9_-] is illegal in a Graphite metric path and is
# replaced with '_' by _compliant_name below.
ILLEGAL_CHAR = re.compile(r'[^\w-]')
def _get_graphite_url(base, host, service, metric, from_):
    """ Constructs an URL for Graphite.

    Args:
     - base (str): base URL for Graphite access
     - host (str): hostname
     - service (str): service, e.g. HTTP
     - metric (str): metric, e.g. size, time
     - from_ (str): Graphite time period

    Returns: str
    """
    # NOTE: the *_ suffixed locals below are referenced by name from the
    # configured format strings via format(**locals()) -- do not rename them.
    host_ = _compliant_name(host)
    service_ = _compliant_name(service)
    metric_ = _compliant_name(metric)
    base = base.rstrip('/')
    title = adagios.settings.graphite_title.format(**locals())
    # graphite_querystring presumably contains {host_}/{service_}/{metric_}/
    # {from_}/{title} style placeholders -- every local above is available.
    url = "{base}/render?" + adagios.settings.graphite_querystring
    url = url.format(**locals())
    return url
def _compliant_name(name):
    """ Makes the necessary replacements for Graphite. """
    # The host placeholder keeps its underscores doubled instead of being
    # run through the illegal-character substitution.
    if name == '_HOST_':
        return '__HOST__'
    return ILLEGAL_CHAR.sub('_', name)
def get(base, host, service, metrics, units):
    """ Build Graphite URLs for every metric over every time period.

    Args:
     - base (str): base URL for Graphite access
     - host (str): hostname
     - service (str): service, e.g. HTTP
     - metrics (list): list of metrics, e.g. ["size", "time"]
     - units (list): a list of <name,css_id,unit>,
       see adagios.settings.GRAPHITE_PERIODS

    Returns a list shaped like:
      [{'name': 'One day',
        'css_id': 'day',
        'metrics': {'size': 'http://url-of-size-metric',
                    'time': 'http://url-of-time-metric'}},
       ...]
    """
    graphs = []
    for period_name, css_id, unit in units:
        metric_urls = dict(
            (metric, _get_graphite_url(base, host, service, metric, unit))
            for metric in metrics)
        graphs.append({'name': period_name,
                       'css_id': css_id,
                       'metrics': metric_urls})
    return graphs
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = rest
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Convenient stateless functions for the status module. These are meant for programs to interact
with status of Nagios.
"""
import time
import pynag.Control.Command
import pynag.Model
import pynag.Utils
import adagios.status.utils
import pynag.Parsers
import collections
from django.utils.translation import ugettext as _
from adagios import userdata
def hosts(request, fields=None, **kwargs):
    """ Get a list of hosts.

    Arguments:
        fields -- optional list of attributes to return; None means all fields.
        Any remaining **kwargs are treated as a pynag.Utils.grep()-style filter.
    """
    return adagios.status.utils.get_hosts(request=request, fields=fields, **kwargs)
def services(request, fields=None, **kwargs):
    """ Get a list of services; thin wrapper around
    adagios.status.utils.get_services(), same semantics as hosts(). """
    return adagios.status.utils.get_services(request=request, fields=fields, **kwargs)
def services_dt(request, fields=None, **kwargs):
    """ DataTables-style wrapper around get_services().

    Returns a dict with sEcho/iTotalRecords counters and one row per service
    in 'aaData'. Note: fields/kwargs are accepted but not used; only
    host_name and description are fetched.
    """
    found = adagios.status.utils.get_services(
        request=request, fields='host_name,description')
    rows = [service.values() for service in found]
    return {
        'sEcho': len(found),
        'iTotalRecords': len(found),
        'aaData': rows,
    }
def contacts(request, fields=None, *args, **kwargs):
    """ Wrapper around pynag.Parsers.mk_livestatus.get_contacts() """
    livestatus = adagios.status.utils.livestatus(request)
    return livestatus.get_contacts(*args, **kwargs)
def emails(request, *args, **kwargs):
    """ Returns a list of all emails of all contacts """
    livestatus = adagios.status.utils.livestatus(request)
    # Only contacts with a non-empty email field are returned by this filter.
    return [contact['email'] for contact in livestatus.get_contacts('Filter: email !=')]
def acknowledge_many(hostlist, servicelist, sticky=1, notify=1, persistent=0, author="adagios", comment="acknowledged by Adagios"):
    """ Same as acknowledge(), but operates on many hosts/services at a time.

    Arguments:
        hostlist    -- string in the format of host1;host2;host3
        servicelist -- string in the format of host1,service1;host2,service2
    """
    # Collect (host_name, service_description) pairs; None means a host check.
    targets = [(host, None) for host in hostlist.split(';') if host]
    for entry in servicelist.split(';'):
        if not entry:
            continue
        host, service = entry.split(',')
        targets.append((host, service))
    for host, service in targets:
        acknowledge(
            host_name=host,
            service_description=service,
            sticky=sticky,
            notify=notify,
            persistent=persistent,
            author=author,
            comment=comment
        )
    return _("Success")
def acknowledge(host_name, service_description=None, sticky=1, notify=1, persistent=0, author='adagios', comment='acknowledged by Adagios'):
    """ Acknowledge one single host or service check """
    # Arguments shared by both the host and the service variant:
    common = dict(host_name=host_name,
                  sticky=sticky,
                  notify=notify,
                  persistent=persistent,
                  author=author,
                  comment=comment)
    if service_description in (None, '', u'', '_HOST_'):
        pynag.Control.Command.acknowledge_host_problem(**common)
    else:
        pynag.Control.Command.acknowledge_svc_problem(
            service_description=service_description, **common)
def downtime_many(hostlist, servicelist, hostgrouplist, start_time=None, end_time=None, fixed=1, trigger_id=0, duration=7200, author='adagios', comment='Downtime scheduled by adagios', all_services_on_host=False, hostgroup_name=None):
    """ Same as downtime(), but operates on semicolon separated lists.

    Arguments:
        hostlist      -- string in the format of host1;host2;host3
        hostgrouplist -- string in the format of hostgroup1;hostgroup2;hostgroup3
        servicelist   -- string in the format of host1,service1;host2,service2
    """
    # Build (host_name, service_description, hostgroup_name) triplets.
    triplets = [(host, None, None) for host in hostlist.split(';') if host]
    triplets += [(None, None, group) for group in hostgrouplist.split(';') if group]
    for entry in servicelist.split(';'):
        if not entry:
            continue
        host, service = entry.split(',')
        triplets.append((host, service, None))
    for host, service, group in triplets:
        downtime(
            host_name=host,
            service_description=service,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
            all_services_on_host=all_services_on_host,
            hostgroup_name=group
        )
def downtime(host_name=None, service_description=None, start_time=None, end_time=None, fixed=1, trigger_id=0, duration=7200, author='adagios', comment='Downtime scheduled by adagios', all_services_on_host=False, hostgroup_name=None):
    """ Schedule downtime for a host, a service, or a whole hostgroup.

    Arguments:
        host_name            -- host to schedule downtime for
        service_description  -- if provided, schedule for this service instead
        start_time, end_time -- unix timestamps; for fixed downtimes empty
                                values default to "now" and "now + duration"
        fixed                -- 1 for a fixed downtime, 0 for flexible
        trigger_id           -- id of a triggering downtime (0 = none)
        duration             -- downtime length in seconds
        all_services_on_host -- also schedule downtime for every service on the host
        hostgroup_name       -- schedule for every host and service in this hostgroup
    Returns:
        The pynag command result, or a (result1, result2) pair when two
        commands are issued (hostgroup, or host + all its services).
    """
    if fixed in (1, '1') and start_time in (None, ''):
        start_time = time.time()
    if fixed in (1, '1') and end_time in (None, ''):
        end_time = int(start_time) + int(duration)
    # These may arrive as strings from the web layer:
    if all_services_on_host == 'false':
        all_services_on_host = False
    elif all_services_on_host == 'true':
        all_services_on_host = True
    # Check if we are supposed to schedule downtime for a whole hostgroup:
    if hostgroup_name:
        # BUGFIX: stray trailing commas previously turned result1 into a
        # one-element tuple while result2 was a plain result.
        result1 = pynag.Control.Command.schedule_hostgroup_host_downtime(
            hostgroup_name=hostgroup_name,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
        )
        result2 = pynag.Control.Command.schedule_hostgroup_svc_downtime(
            hostgroup_name=hostgroup_name,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
        )
        return result1, result2
    # Check if we are recursively scheduling downtime for host and all its services:
    elif all_services_on_host:
        result1 = pynag.Control.Command.schedule_host_svc_downtime(
            host_name=host_name,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
        )
        result2 = pynag.Control.Command.schedule_host_downtime(
            host_name=host_name,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
        )
        return result1, result2
    # Otherwise, if this is a host
    elif service_description in (None, '', u'', '_HOST_'):
        return pynag.Control.Command.schedule_host_downtime(
            host_name=host_name,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
        )
    # otherwise it must be a service:
    else:
        return pynag.Control.Command.schedule_svc_downtime(
            host_name=host_name,
            service_description=service_description,
            start_time=start_time,
            end_time=end_time,
            fixed=fixed,
            trigger_id=trigger_id,
            duration=duration,
            author=author,
            comment=comment,
        )
import adagios.utils
def reschedule_many(request, hostlist, servicelist, check_time=None, **kwargs):
    """ Same as reschedule() but takes semicolon separated lists.

    Arguments:
        hostlist    -- "host1;host2;..." - one host check reschedule per entry
        servicelist -- "host1,service1;host2,service2;..." for service checks
    """
    for host in hostlist.split(';'):
        if not host:
            continue
        reschedule(request, host_name=host, service_description=None, check_time=check_time)
    for entry in servicelist.split(';'):
        if not entry:
            continue
        host, service = entry.split(',')
        reschedule(request, host_name=host, service_description=service, check_time=check_time)
    return {'message': _("command sent successfully")}
def reschedule(request, host_name=None, service_description=None, check_time=None, wait=0, hostlist='', servicelist=''):
    """ Reschedule a check of this service/host

    Arguments:
        host_name           -- Name of the host
        service_description -- Name of the service check. If left empty, host check will be rescheduled
        check_time          -- timestamp of when to execute this check, if left empty, execute right now
        wait                -- If set to "1", block via a livestatus wait query
                               until the check has actually run
    """
    # BUGFIX: the original tested "check_time is ''" -- an identity comparison
    # against a string literal, which is not guaranteed to match. Use equality.
    if check_time in (None, ''):
        check_time = time.time()
    if service_description in (None, '', u'', '_HOST_', 'undefined'):
        # Host check:
        service_description = ""
        pynag.Control.Command.schedule_forced_host_check(
            host_name=host_name, check_time=check_time)
        if wait == "1":
            livestatus = adagios.status.utils.livestatus(request)
            livestatus.query("GET hosts",
                             "WaitObject: %s " % host_name,
                             "WaitCondition: last_check > %s" % check_time,
                             "WaitTrigger: check",
                             "Filter: host_name = %s" % host_name,
                             )
    else:
        # Service check:
        pynag.Control.Command.schedule_forced_svc_check(
            host_name=host_name, service_description=service_description, check_time=check_time)
        if wait == "1":
            livestatus = adagios.status.utils.livestatus(request)
            livestatus.query("GET services",
                             "WaitObject: %s %s" % (
                                 host_name, service_description),
                             "WaitCondition: last_check > %s" % check_time,
                             "WaitTrigger: check",
                             "Filter: host_name = %s" % host_name,
                             )
    return "ok"
def comment(author, comment, host_name, service_description=None, persistent=1):
    """ Adds a comment to a particular host or service.

    If the "persistent" field is set to zero (0), the comment will be deleted
    the next time Nagios is restarted. Otherwise, the comment will persist
    across program restarts until it is deleted manually. """
    shared = dict(host_name=host_name, persistent=persistent, author=author, comment=comment)
    if service_description in (None, '', u'', '_HOST_'):
        pynag.Control.Command.add_host_comment(**shared)
    else:
        pynag.Control.Command.add_svc_comment(
            service_description=service_description, **shared)
    return "ok"
def delete_comment(comment_id, object_type=None, host_name=None, service_description=None):
    """ Delete one host or service comment by its id.

    host_name is accepted for API compatibility but is not used by the
    underlying commands.
    """
    is_host_comment = (object_type == "host"
                       or service_description in (None, '', u'', '_HOST_'))
    if is_host_comment:
        pynag.Control.Command.del_host_comment(comment_id=comment_id)
    else:
        pynag.Control.Command.del_svc_comment(comment_id=comment_id)
    return "ok"
def edit(object_type, short_name, attribute_name, new_value):
    """ Change one single attribute for one single object.

    Arguments:
        object_type    -- Type of object to change (i.e. "host","service", etc)
        short_name     -- Short Name of the object f.e. the host_name of a host
        attribute_name -- Name of attribute to change .. f.e. 'address'
        new_value      -- New value of the object .. f.e. '127.0.0.1'
    Examples:
        edit('host','localhost','address','127.0.0.1')
        edit('service', 'localhost/Ping', 'contactgroups', 'None')
    """
    # TODO : MK Livestatus access acording to remote_user
    model_class = pynag.Model.string_to_class[object_type]
    target = model_class.objects.get_by_shortname(short_name)
    target[attribute_name] = new_value
    target.save()
    return str(target)
def get_map_data(request, host_name=None):
    """ Return {'hosts': [...], 'connections': [...]} for map rendering.

    Each returned host is a livestatus row augmented with x/y coordinates
    taken from its configured 2d_coords attribute. If host_name is provided,
    only that host is returned. Connections are parent/child coordinate pairs
    for hosts whose network parent is also on the map.
    """
    livestatus = adagios.status.utils.livestatus(request)
    all_hosts = livestatus.query('GET hosts', )
    hosts_with_coordinates = pynag.Model.Host.objects.filter(
        **{'2d_coords__exists': True})

    # Build a host_name -> 2d_coords lookup once instead of re-scanning the
    # configuration for every livestatus host (the original was O(n*m)).
    # setdefault keeps the first definition, matching the original's
    # first-match-wins scan.
    coordinates = {}
    for config_host in hosts_with_coordinates:
        coordinates.setdefault(config_host.host_name, config_host['2d_coords'])

    hosts = []
    for i in all_hosts:
        name = i['name']
        if host_name not in (None, '', name):
            continue
        coords = coordinates.get(name)
        if coords is None:
            # Host has no coordinates configured; skip it
            continue
        tmp = coords.split(',')
        if len(tmp) != 2:
            # Malformed coordinate string; skip it
            continue
        x, y = tmp
        # BUGFIX: the original also built a small `host` dict here that was
        # never appended anywhere (dead code); it has been removed.
        i['x_coordinates'] = x
        i['y_coordinates'] = y
        hosts.append(i)

    # For all hosts that have network parents, lets return a proper line for
    # those two
    connections = []
    for i in hosts:
        # If a network parent is also on the map, connect the two.
        for parent in i.get('parents'):
            for x in hosts:
                if x.get('name') == parent:
                    connection = {
                        'parent_x_coordinates': x.get('x_coordinates'),
                        'parent_y_coordinates': x.get('y_coordinates'),
                        'child_x_coordinates': i.get('x_coordinates'),
                        'child_y_coordinates': i.get('y_coordinates'),
                        'state': i.get('state'),
                        'description': i.get('name'),
                    }
                    connections.append(connection)
    return {'hosts': hosts, 'connections': connections}
def change_host_coordinates(host_name, latitude, longitude):
    """ Updates longitude and latitude for one specific host """
    target = pynag.Model.Host.objects.get_by_shortname(host_name)
    target['2d_coords'] = "%s,%s" % (latitude, longitude)
    target.save()
def autocomplete(request, q):
    """ Returns {'hosts': [...], 'hostgroups': [...], 'services': [...]} with
    the sorted, de-duplicated names matching search query q. """
    if q is None:
        q = ''
    found_hosts = adagios.status.utils.get_hosts(request, host_name__contains=q)
    found_services = adagios.status.utils.get_services(request, service_description__contains=q)
    found_hostgroups = adagios.status.utils.get_hostgroups(request, hostgroup_name__contains=q)
    return {
        'hosts': sorted(set(host['name'] for host in found_hosts)),
        'hostgroups': sorted(set(group['name'] for group in found_hostgroups)),
        'services': sorted(set(svc['description'] for svc in found_services)),
    }
def delete_downtime(downtime_id, is_service=True):
    """ Delete one specific downtime with id that matches downtime_id.

    Arguments:
        downtime_id -- Id of the downtime to be deleted
        is_service  -- If True/1/'1', treat as a service downtime, otherwise a host downtime
    """
    delete_command = pynag.Control.Command.del_host_downtime
    if is_service in (True, 1, '1'):
        delete_command = pynag.Control.Command.del_svc_downtime
    delete_command(downtime_id)
    return "ok"
def top_alert_producers(limit=5, start_time=None, end_time=None):
    """ Return a list of (host_name, number_of_alerts) pairs, noisiest first.

    Arguments:
        limit      -- return at most this many hosts (default 5)
        start_time -- Search log starting with start_time (default since last log rotation)
        end_time   -- optional upper bound on log timestamps
    """
    # Empty strings from the web layer mean "not specified":
    if start_time == '':
        start_time = None
    if end_time == '':
        end_time = None
    log_parser = pynag.Parsers.LogFiles()
    log = log_parser.get_state_history(start_time=start_time, end_time=end_time)

    # Count non-ok (state > 0) entries per host:
    alert_counts = collections.defaultdict(int)
    for entry in log:
        if 'host_name' in entry and 'state' in entry and entry['state'] > 0:
            alert_counts[entry['host_name']] += 1

    producers = alert_counts.items()
    producers.sort(cmp=lambda a, b: cmp(a[1], b[1]), reverse=True)
    # BUGFIX: the original only sliced when limit > len(producers) -- exactly
    # when slicing is a no-op -- so the limit was never applied. Truncate
    # unconditionally to the top `limit` entries.
    return producers[:int(limit)]
def log_entries(*args, **kwargs):
    """ Same as pynag.Parsers.LogFiles().get_log_entries()

    Arguments:
        start_time -- unix timestamp. if None, return all entries from today
        end_time   -- If specified, only fetch log entries older than this (unix timestamp)
        strict     -- If True, only return entries between start_time and end_time; if False,
                   -- then return entries that belong to same log files as given timeset
        search     -- If provided, only return log entries that contain this string (case insensitive)
        kwargs     -- All extra arguments are provided as filter on the log entries. f.e. host_name="localhost"
    Returns:
        List of dicts
    """
    parser = pynag.Parsers.LogFiles()
    return parser.get_log_entries(*args, **kwargs)
def state_history(start_time=None, end_time=None, object_type=None, host_name=None, service_description=None, hostgroup_name=None):
    """ Returns a list of dicts, with the state history of hosts and services.

    Parameters behave similar to get_log_entries(). object_type must be
    'host', 'service' or 'hostgroup'.
    Raises:
        Exception for any other object_type.
    """
    # Empty strings from the web layer mean "not specified":
    if start_time == '':
        start_time = None
    if end_time == '':
        end_time = None
    if host_name == '':
        host_name = None
    if service_description == '':
        service_description = None
    l = pynag.Parsers.LogFiles()
    log_entries = l.get_state_history(start_time=start_time, end_time=end_time, host_name=host_name, service_description=service_description)
    if object_type == 'host' or object_type == 'service':
        # host/service filtering was already handled by get_state_history above
        pass
    elif object_type == 'hostgroup':
        # Narrow the entries down to hosts that belong to the hostgroup:
        hg = pynag.Model.Hostgroup.objects.get_by_shortname(hostgroup_name)
        hosts = hg.get_effective_hosts()
        hostnames = map(lambda x: x.host_name, hosts)
        log_entries = filter(lambda x: x['host_name'] in hostnames, log_entries)
    else:
        raise Exception(_("Unsupported object type: %s") % object_type)
    # Add some css-hints for and duration of each state history entry as percent of duration
    # this is used by all views that have state history and on top of it a progress bar which shows
    # Up/downtime totals.
    c = {'log': log_entries }
    if len(c['log']) > 0:
        log = c['log']
        c['start_time'] = start_time = log[0]['time']
        c['end_time'] = log[-1]['time']
        now = time.time()
        # Total span from the first entry until now:
        total_duration = now - start_time
        # Map nagios-style state number to a bootstrap css class:
        css_hint = {}
        css_hint[0] = 'success'
        css_hint[1] = 'warning'
        css_hint[2] = 'danger'
        css_hint[3] = 'info'
        for i in log:
            # Each entry is annotated in place with its share of the span:
            i['duration_percent'] = 100 * i['duration'] / total_duration
            i['bootstrap_status'] = css_hint[i['state']]
    return log_entries
def _get_service_model(host_name, service_description=None):
""" Return one pynag.Model.Service object for one specific service as seen
from status point of view. That means it will do its best to return a service
that was assigned to hostgroup but the caller requested a specific host.
Returns:
pynag.Model.Service object
Raises:
KeyError if not found
"""
try:
return pynag.Model.Service.objects.get_by_shortname("%s/%s" % (host_name, service_description))
except KeyError, e:
host = pynag.Model.Host.objects.get_by_shortname(host_name)
for i in host.get_effective_services():
if i.service_description == service_description:
return i
raise e
def command_line(host_name, service_description=None):
    """ Returns effective command line for a host or a service (i.e. resolves check_command) """
    try:
        target = _get_host_or_service(host_name, service_description)
        return target.get_effective_command_line(host_name=host_name)
    except KeyError:
        return _("Could not resolve commandline. Object not found")
def _get_host_or_service(host_name, service_description=None):
    """ Return a pynag.Model.Host or pynag.Model.Service, or raise KeyError
    if none are found. """
    host = pynag.Model.Host.objects.get_by_shortname(host_name)
    if not service_description or service_description == '_HOST_':
        return host
    matches = pynag.Model.Service.objects.filter(
        host_name=host_name, service_description=service_description)
    if matches:
        return matches[0]
    # If no services were found, the service might be applied to a hostgroup
    for candidate in host.get_effective_services():
        if candidate.service_description == service_description:
            return candidate
    raise KeyError(_("Object not found"))
def update_check_command(host_name, service_description=None, **kwargs):
    """ Saves macro custom variables of a given host or service.

    Arguments:
        host_name           -- the host to update
        service_description -- if provided, update this service instead of the host
        kwargs              -- macros to set; only keys starting with
                               $_SERVICE, $ARG or $_HOST are applied
    Returns: localized "Object saved" message
    Raises: Exception when the host or service is not found
    """
    try:
        # Look the target object up once. The original re-fetched it on every
        # loop iteration and raised NameError at obj.save() when kwargs was
        # empty, since obj was never bound.
        if service_description in (None, ''):
            obj = pynag.Model.Host.objects.get_by_shortname(host_name)
        else:
            obj = pynag.Model.Service.objects.get_by_shortname(
                "%s/%s" % (host_name, service_description))
        for k, v in kwargs.items():
            if k.startswith("$_SERVICE") or k.startswith('$ARG') or k.startswith('$_HOST'):
                obj.set_macro(k, v)
        obj.save()
        return _("Object saved")
    except KeyError:
        raise Exception(_("Object not found"))
def get_business_process_names():
    """ Returns the names of all configured business processes """
    import adagios.businessprocess
    return [process.name for process in adagios.businessprocess.get_all_processes()]
def get(request, object_type, *args, **kwargs):
    """ Generic livestatus getter for any object type.

    Arguments:
        object_type -- livestatus table, singular or plural ("host"/"hosts", ...)
        args/kwargs -- pynag.Utils.grep()-style filters
    Returns:
        List of livestatus rows. Services additionally get a synthetic
        "name" field of the form "host_name/description".
    """
    livestatus_arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
    # Livestatus table names are plural:
    if not object_type.endswith('s'):
        object_type += 's'
    # For services, name__contains means "match host_name OR description":
    if 'name__contains' in kwargs and object_type == 'services':
        name = str(kwargs['name__contains'])
        livestatus_arguments = [x for x in livestatus_arguments
                                if x.startswith('name')]
        livestatus_arguments.append('Filter: host_name ~ %s' % name)
        livestatus_arguments.append('Filter: description ~ %s' % name)
        livestatus_arguments.append('Or: 2')
    livestatus = adagios.status.utils.livestatus(request)
    results = livestatus.query('GET %s' % object_type, *livestatus_arguments)
    # BUGFIX: object_type was already pluralized above, so the original
    # comparison with the singular 'service' never matched and services
    # never received their synthetic "name" attribute.
    if object_type == 'services':
        for i in results:
            i['name'] = i.get('host_name') + "/" + i.get('description')
    return results
def get_business_process(process_name=None, process_type=None):
    """ Returns a list of business processes as plain json-friendly dicts.

    If process_name is specified, the sub processes of that process are
    returned instead of all top-level processes.
    """
    import adagios.bi
    if process_name:
        parent = adagios.bi.get_business_process(str(process_name), process_type)
        processes = parent.get_processes()
    else:
        processes = adagios.bi.get_all_processes()
    result = []
    for process in processes:
        result.append({
            'state': process.get_status(),
            'name': process.name,
            'display_name': process.display_name,
            'subprocess_count': len(process.processes),
            'process_type': process.process_type,
        })
    return result
def remove_downtime(request, host_name, service_description=None, downtime_id=None):
    """ Remove downtime for one specific host or service.

    If downtime_id is not provided, every downtime of that host (or service)
    is looked up via livestatus and removed.
    """
    if downtime_id:
        downtimes_to_remove = [downtime_id]
    else:
        livestatus = adagios.status.utils.livestatus(request)
        query_parameters = ['GET downtimes',
                            'Filter: host_name = %s' % host_name]
        if service_description:
            query_parameters.append(
                'Filter: service_description = %s' % service_description)
        downtimes_to_remove = [row['id'] for row in livestatus.query(*query_parameters)]
    if service_description:
        remove_command = pynag.Control.Command.del_svc_downtime
    else:
        remove_command = pynag.Control.Command.del_host_downtime
    for one_id in downtimes_to_remove:
        remove_command(downtime_id=one_id)
    return "ok"
def remove_acknowledgement(host_name, service_description=None):
    """ Remove an acknowledgement from a host or, if service_description
    is given, from that one service on the host.
    """
    if service_description:
        pynag.Control.Command.remove_svc_acknowledgement(host_name=host_name, service_description=service_description)
    else:
        pynag.Control.Command.remove_host_acknowledgement(host_name=host_name)
    return "ok"
def submit_check_result(request, host_name, service_description=None, autocreate=False, status_code=3, plugin_output=_("No message was entered"), performance_data=""):
    """ Submit a passive check_result for a given host or a service

    Arguments:
      host_name -- Name of the host you want to submit check results for
      service_description -- If provided, submit a result for service this service instead of a host
      autocreate -- If this is set to True, and host/service does not exist. It will be created
      status_code -- Nagios style status for the check (0,1,2,3 which means ok,warning,critical, etc)
      plugin_output -- The text output of the check to display in a web interface
      performance_data -- Optional, If there are any performance metrics to display
    Returns:
      dict with either 'message' (command submitted) or 'error' (no such object)
    """
    livestatus = adagios.status.utils.livestatus(request)
    result = {}
    # Nagios convention: perfdata follows the plugin output after a pipe
    output = plugin_output + " | " + performance_data
    if not service_description:
        object_type = 'host'
        args = pynag.Utils.grep_to_livestatus(host_name=host_name)
        objects = livestatus.get_hosts(*args)
    else:
        object_type = 'service'
        args = pynag.Utils.grep_to_livestatus(host_name=host_name, service_description=service_description)
        objects = livestatus.get_services(*args)
    # The livestatus lookup above only verifies the target exists
    if not objects and autocreate is True:
        raise Exception(_("Autocreate not implemented yet"))
    elif not objects:
        result['error'] = 'No %s with that name' % object_type
    else:
        if object_type == 'host':
            pynag.Control.Command.process_host_check_result(host_name, status_code, output)
        else:
            pynag.Control.Command.process_service_check_result(host_name, service_description, status_code, output)
        result['message'] = _("Command has been submitted.")
    return result
def statistics(request, **kwargs):
    """ Returns a dict with various statistics on status data.

    Thin wrapper: all keyword arguments are forwarded to
    adagios.status.utils.get_statistics() as livestatus filters.
    """
    return adagios.status.utils.get_statistics(request, **kwargs)
def metrics(request, **kwargs):
    """ Return a list of dicts, one per valid perfdata metric found on
    the services that match the given filter kwargs.
    """
    columns = ['host_name', 'description', 'perf_data', 'state', 'host_state']
    matching_services = adagios.status.utils.get_services(request, fields=columns, **kwargs)
    rows = []
    for svc in matching_services:
        # Parse the nagios perfdata string into individual metrics
        for m in pynag.Utils.PerfData(svc['perf_data']).metrics:
            if not m.is_valid():
                continue
            rows.append({
                'host_name': svc['host_name'],
                'service_description': svc['description'],
                'state': svc['state'],
                'host_state': svc['host_state'],
                'label': m.label,
                'value': m.value,
                'uom': m.uom,
                'warn': m.warn,
                'crit': m.crit,
                'min': m.min,
                'max': m.max,
            })
    return rows
def metric_names(request, **kwargs):
    """ Returns the names of all perfdata metrics that match selected request """
    columns = ['host_name', 'description', 'perf_data', 'state', 'host_state']
    matching_services = adagios.status.utils.get_services(request, fields=columns, **kwargs)
    labels = set()
    for svc in matching_services:
        # Collect the label of every valid metric in this service's perfdata
        for m in pynag.Utils.PerfData(svc['perf_data']).metrics:
            if m.is_valid():
                labels.add(m.label)
    return {
        'services that match filter': len(matching_services),
        'filter': kwargs,
        'metric_names': sorted(list(labels)),
    }
def wait(table, WaitObject, WaitCondition=None, WaitTrigger='check', **kwargs):
    # Block until livestatus reports that WaitObject satisfies WaitCondition.
    # Default condition: a check newer than one second ago has happened.
    # NOTE(review): the print statements look like leftover debug output.
    print _("Lets wait for"), locals()
    if not WaitCondition:
        WaitCondition = "last_check > %s" % int(time.time()-1)
    livestatus = adagios.status.utils.livestatus(None)
    print _("livestatus ok")
    # 'Stats: state != 999' matches everything; it only forces a Stats reply
    result = livestatus.get(table, 'Stats: state != 999', WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
    print _("ok no more waiting for "), WaitObject
    return result
def wait_many(hostlist, servicelist, WaitCondition=None, WaitTrigger='check', **kwargs):
    # Block until every listed host and service has been checked.
    # hostlist is ';'-separated host names; servicelist entries are
    # "host,description" pairs separated by ';'.
    if not WaitCondition:
        WaitCondition = "last_check > %s" % int(time.time()-1)
    livestatus = adagios.status.utils.livestatus(None)
    for host in hostlist.split(';'):
        if not host:
            continue
        WaitObject = host
        livestatus.get('hosts', WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
        print WaitObject
    for service in servicelist.split(';'):
        if not service:
            continue
        # livestatus expects "host;description" as a service WaitObject
        WaitObject = service.replace(',', ';')
        livestatus.get('services', WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
        print WaitObject
def toggle_backend_visibility(request, backend_name):
    """ Flip whether the given livestatus backend is hidden for this user.

    Args:
        request: a Django request
        backend_name (str): The name of the backend.
    """
    user = userdata.User(request)
    # Treat a missing/empty preference as "nothing disabled"
    disabled = user.disabled_backends or []
    if backend_name in disabled:
        disabled.remove(backend_name)
    else:
        disabled.append(backend_name)
    user.disabled_backends = disabled
    user.save()
########NEW FILE########
__FILENAME__ = adagiostags
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
from datetime import datetime, timedelta
from django import template
from django.utils.timesince import timesince
from django.utils.translation import ugettext as _
register = template.Library()
@register.filter("timestamp")
def timestamp(value):
    """ Template filter: convert a unix timestamp into a datetime object.

    Returns an empty string when value is not a usable timestamp, so bad
    data degrades gracefully in templates instead of crashing rendering.
    """
    try:
        return datetime.fromtimestamp(value)
    except (TypeError, ValueError, OverflowError):
        # Bugfix: fromtimestamp() raises TypeError for non-numeric input
        # and ValueError/OverflowError for out-of-range values. The old
        # code caught AttributeError, which fromtimestamp never raises.
        return ''
@register.filter("duration")
def duration(value):
    """ Template filter: render a number of seconds as a human readable
    time span (delegates to Django's timesince).
    """
    start = datetime.min
    end = start + timedelta(0, value)
    return timesince(start, end)
@register.filter("hash")
def hash(h, key):
    # Template filter: dictionary lookup with a variable key.
    # NOTE: deliberately shadows the builtin hash() inside this module.
    return h[key]
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import pynag.Parsers
import os
from django.test.client import RequestFactory
import adagios.status
import adagios.status.utils
import adagios.status.graphite
import adagios.settings
import adagios.utils
class LiveStatusTestCase(unittest.TestCase):
    """Integration tests for the status app, run against a throwaway
    nagios + livestatus environment (adagios.utils.FakeAdagiosEnvironment).
    """
    @classmethod
    def setUpClass(cls):
        # Spin up a minimal nagios instance with livestatus once for the
        # whole test case; all tests share it.
        cls.nagios_config = adagios.settings.nagios_config
        cls.environment = adagios.utils.FakeAdagiosEnvironment()
        cls.environment.create_minimal_environment()
        cls.environment.configure_livestatus()
        cls.environment.update_adagios_global_variables()
        cls.environment.start()
        cls.livestatus = cls.environment.get_livestatus()
        cls.factory = RequestFactory()
    @classmethod
    def tearDownClass(cls):
        # Shut down the fake environment and clean up after it
        cls.environment.terminate()
    def testLivestatusConnectivity(self):
        requests = self.livestatus.query('GET status', 'Columns: requests')
        self.assertEqual(
            1, len(requests), _("Could not get status.requests from livestatus"))
    def testLivestatusConfigured(self):
        # Walk nagios.cfg for a livestatus broker_module line and verify
        # both the module file and its socket exist on disk.
        config = pynag.Parsers.config(cfg_file=self.nagios_config)
        config.parse_maincfg()
        for k, v in config.maincfg_values:
            if k == "broker_module" and v.find('livestatus') > 1:
                tmp = v.split()
                self.assertFalse(
                    len(tmp) < 2, _(' We think livestatus is incorrectly configured. In nagios.cfg it looks like this: %s') % v)
                module_file = tmp[0]
                socket_file = tmp[1]
                self.assertTrue(
                    os.path.exists(module_file), _(' Livestatus Broker module not found at "%s". Is nagios correctly configured?') % module_file)
                self.assertTrue(
                    os.path.exists(socket_file), _(' Livestatus socket file was not found (%s). Make sure nagios is running and that livestatus module is loaded') % socket_file)
                return
        self.assertTrue(
            False, _('Nagios Broker module not found. Is livestatus installed and configured?'))
    def testPageLoad(self):
        """ Loads a bunch of status pages, looking for a crash """
        self.loadPage('/status/')
        self.loadPage('/status/hosts')
        self.loadPage('/status/services')
        self.loadPage('/status/contacts')
        self.loadPage('/status/parents')
        self.loadPage('/status/state_history')
        self.loadPage('/status/log')
        self.loadPage('/status/comments')
        self.loadPage('/status/downtimes')
        self.loadPage('/status/hostgroups')
        self.loadPage('/status/servicegroups')
        self.loadPage('/status/map')
        self.loadPage('/status/dashboard')
    def test_status_detail(self):
        """ Tests for /status/detail """
        tmp = self.loadPage('/status/detail?contact_name=nagiosadmin')
        self.assertTrue('nagiosadmin belongs to the following' in tmp.content)
        tmp = self.loadPage('/status/detail?host_name=ok_host')
        self.assertTrue('ok_host' in tmp.content)
        tmp = self.loadPage('/status/detail?host_name=ok_host&service_description=ok%20service%201')
        self.assertTrue('ok_host' in tmp.content)
        tmp = self.loadPage('/status/detail?contactgroup_name=admins')
        self.assertTrue('nagiosadmin' in tmp.content)
    def testStateHistory(self):
        # Just check the view renders without raising
        request = self.factory.get('/status/state_history')
        adagios.status.views.state_history(request)
    def loadPage(self, url, expected_status_code=200):
        """ Load one specific page, and assert if return code is not 200 """
        c = Client()
        response = c.get(url)
        self.assertEqual(response.status_code, expected_status_code, _("Expected status code %(code)s for page %(url)s") % {'code': expected_status_code, 'url': url})
        return response
    def testSubmitCommand(self):
        """ Test adagios.rest.status.submit_check_results
        """
        c = Client()
        data = {}
        data['host_name'] = 'adagios test host'
        data['service_description'] = 'nonexistant'
        data['status_code'] = "0"
        data['plugin_output'] = 'test message'
        data['performance_data'] = ''
        response = c.post('/rest/status/json/submit_check_result', data=data)
        self.assertEqual(200, response.status_code)
class Graphite(unittest.TestCase):
    """Smoke tests for the adagios.status.graphite helpers."""
    def test__get_graphite_url(self):
        """ Smoketest for adagios.status.graphite._get_graphite_url() """
        base = "http://localhost/graphite"
        host = "localhost"
        service = "Ping"
        metric = "packetloss"
        from_ = "-1d"
        # Pack the locals defined above into keyword arguments; 'self'
        # must be dropped since the helper does not take it.
        parameters = locals()
        parameters.pop('self', None)
        result = adagios.status.graphite._get_graphite_url(**parameters)
        self.assertTrue(result.startswith(base))
        self.assertTrue(host in result)
        self.assertTrue(service in result)
        self.assertTrue(metric in result)
    def test_get(self):
        """ Smoketest for adagios.status.graphite.get() """
        base = "http://localhost/graphite"
        host = "localhost"
        service = "Ping"
        metrics = ["packetloss", "rta"]
        units = [("test", "test", "-1d")]
        # Same locals()-as-kwargs trick as above
        parameters = locals()
        parameters.pop('self', None)
        result = adagios.status.graphite.get(**parameters)
        self.assertTrue(result)
        self.assertTrue(len(result) == 1)
        self.assertTrue('rta' in result[0]['metrics'])
        self.assertTrue('packetloss' in result[0]['metrics'])
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
# URL routing for the status app. All views live in adagios/status/views.py;
# the trailing '/?' in each pattern makes the final slash optional.
urlpatterns = patterns('adagios',
    url(r'^/?$', 'status.views.status_index'),
    url(r'^/acknowledgements/?$', 'status.views.acknowledgement_list'),
    url(r'^/error/?$', 'status.views.error_page'),
    url(r'^/comments/?$', 'status.views.comment_list'),
    url(r'^/contacts/?$', 'status.views.contact_list'),
    url(r'^/contactgroups/?$', 'status.views.contactgroups'),
    url(r'^/dashboard/?$', 'status.views.dashboard'),
    url(r'^/detail/?$', 'status.views.detail'),
    url(r'^/downtimes/?$', 'status.views.downtime_list'),
    url(r'^/hostgroups/?$', 'status.views.status_hostgroups'),
    url(r'^/hosts/?$', 'status.views.hosts'),
    url(r'^/log/?$', 'status.views.log'),
    url(r'^/map/?', 'status.views.map_view'),
    url(r'^/parents/?$', 'status.views.network_parents'),
    url(r'^/perfdata/?$', 'status.views.perfdata'),
    url(r'^/perfdata2/?$', 'status.views.perfdata2'),
    url(r'^/problems/?$', 'status.views.problems'),
    url(r'^/servicegroups/?$', 'status.views.status_servicegroups'),
    url(r'^/services/?$', 'status.views.services'),
    url(r'^/state_history/?$', 'status.views.state_history'),
    url(r'^/backends/?$', 'status.views.backends'),
    # Misc snippets
    url(r'^/snippets/log/?$', 'status.views.snippets_log'),
    url(r'^/snippets/services/?$', 'status.views.snippets_services'),
    url(r'^/snippets/hosts/?$', 'status.views.snippets_hosts'),
    # Misc tests
    url(r'^/test/services/?$', 'status.views.services_js'),
    url(r'^/test/status_dt/?$', 'status.views.status_dt'),
    url(r'^/test/livestatus/?$', 'status.views.test_livestatus'),
    # Deprecated as of 2013-03-23
    url(r'^/contacts/(?P<contact_name>.+)/?$', 'status.views.contact_detail'),
    url(r'^/hostgroups/(?P<hostgroup_name>.+)/?$', 'status.views.status_hostgroup'),
    url(r'^/contactgroups/(?P<contactgroup_name>.+)/?$', 'status.views.contactgroup_detail'),
    url(r'^/servicegroups/(?P<servicegroup_name>.+)/?$', 'status.views.servicegroup_detail'),
    url(r'^/services_old/?$', 'status.views.status'),
)
########NEW FILE########
__FILENAME__ = utils
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Utility functions for the status app. These are mostly used by
# adagios.status.views
import pynag.Utils
import pynag.Parsers
import adagios.settings
from adagios.misc.rest import add_notification, clear_notification
import simplejson as json
from collections import defaultdict
from adagios import userdata
# Map numeric nagios states to human readable strings; any state not
# listed below (e.g. 3) resolves to "unknown" via the defaultdict factory.
state = defaultdict(lambda: "unknown")
state[0] = "ok"
state[1] = "warning"
state[2] = "critical"
def get_all_backends():
    """ Return a list of configured livestatus backend paths.

    TODO: Properly support multiple instances; splitting the setting on
    commas here is not a good idea.
    """
    configured = adagios.settings.livestatus_path or ''
    return [path.strip() for path in configured.split(',')]
def livestatus(request):
    """ Returns a new pynag.Parsers.MultiSite() connection, with authuser
    automatically set from request.META['REMOTE_USER'] when authorization
    applies, and with the user's disabled backends filtered out.
    """
    if request is None:
        authuser = None
    elif adagios.settings.enable_authorization and not adagios.auth.has_role(request, 'administrators') and not adagios.auth.has_role(request, 'operators'):
        # Authorization is enabled and the user is neither administrator
        # nor operator: restrict livestatus results to this contact.
        authuser = request.META.get('REMOTE_USER', None)
    else:
        authuser = None
    backends = get_all_backends()
    # we remove the disabled backends
    if backends is not None:
        try:
            user = userdata.User(request)
            if user.disabled_backends is not None:
                backends = filter(lambda x: x not in user.disabled_backends, backends)
            clear_notification("userdata problem")
        except Exception as e:
            # Loading user preferences is best-effort: surface any problem
            # as a UI notification instead of failing the status pages.
            message = "%s: %s" % (type(e), str(e))
            add_notification(level="warning", notification_id="userdata problem", message=message)
    livestatus = pynag.Parsers.MultiSite(
        nagios_cfg_file=adagios.settings.nagios_config,
        livestatus_socket_path=adagios.settings.livestatus_path,
        authuser=authuser)
    for i in backends:
        livestatus.add_backend(path=i, name=i)
    return livestatus
def query(request, *args, **kwargs):
    """ Wrapper around pynag.Parsers.mk_livestatus().query(). Any authorization logic should be performed here. """
    return livestatus(request).query(*args, **kwargs)
def get_hostgroups(request, *args, **kwargs):
    """ Get a list of hostgroups from mk_livestatus, using an
    authorization-aware connection.
    """
    return livestatus(request).get_hostgroups(*args, **kwargs)
def get_hosts(request, tags=None, fields=None, *args, **kwargs):
    """ Get a list of hosts from mk_livestatus

    This is a wrapper around pynag.Parsers.mk_livestatus().query()

    Arguments:
        request - Not in use
        tags - Not in use
        fields - If fields=None, return all columns, otherwise return only the columns provided

    Any *args will be passed directly to livestatus
    Any **kwargs will be converted to livestatus "'Filter:' style strings

    Returns:
        A list of dict (hosts)
    """
    # "q" is a free-text search filter, handled separately below
    if 'q' in kwargs:
        q = kwargs.get('q')
        del kwargs['q']
        if not isinstance(q, list):
            q = [q]
    else:
        q = []
    # Often search filters include description, which we will skip
    kwargs.pop('description', None)
    if 'host_state' in kwargs:
        kwargs['state'] = kwargs.pop('host_state')
    # If keyword "unhandled" is in kwargs, then we will fetch unhandled
    # hosts only: down (state=1), unacknowledged, not in downtime
    if 'unhandled' in kwargs:
        del kwargs['unhandled']
        kwargs['state'] = 1
        kwargs['acknowledged'] = 0
        kwargs['scheduled_downtime_depth'] = 0
        #kwargs['host_scheduled_downtime_depth'] = 0
        #kwargs['host_acknowledged'] = 0
    arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
    # if "q" came in from the querystring, lets filter on host_name
    for i in q:
        arguments.append('Filter: name ~~ %s' % i)
        arguments.append('Filter: address ~~ %s' % i)
        arguments.append('Filter: plugin_output ~~ %s' % i)
        # 'Or: 3' combines the three filter lines above
        arguments.append('Or: 3')
    if fields is None:
        fields = [
            'name', 'plugin_output', 'last_check', 'state', 'services', 'services_with_info', 'services_with_state',
            'parents', 'childs', 'address', 'last_state_change', 'acknowledged', 'downtimes', 'comments_with_info',
            'scheduled_downtime_depth', 'num_services_crit', 'num_services_warn', 'num_services_unknown',
            'num_services_ok', 'num_services_pending']
    # fields should be a list, lets create a Column: query for livestatus
    if isinstance(fields, (str, unicode)):
        fields = fields.split(',')
    if len(fields) > 0:
        argument = 'Columns: %s' % (' '.join(fields))
        arguments.append(argument)
    l = livestatus(request)
    result = l.get_hosts(*arguments)
    # Add statistics to every hosts:
    for host in result:
        try:
            host['num_problems'] = host['num_services_crit'] + \
                host['num_services_warn'] + host['num_services_unknown']
            host['children'] = host['services_with_state']
            # last_state_change == 0 looks like "never checked": report unknown
            if host.get('last_state_change') == 0:
                host['state'] = 3
            host['status'] = state[host['state']]
            ok = host.get('num_services_ok')
            warn = host.get('num_services_warn')
            crit = host.get('num_services_crit')
            pending = host.get('num_services_pending')
            unknown = host.get('num_services_unknown')
            total = ok + warn + crit + pending + unknown
            host['total'] = total
            host['problems'] = warn + crit + unknown
            try:
                total = float(total)
                host['health'] = float(ok) / total * 100.0
                host['percent_ok'] = ok / total * 100
                host['percent_warn'] = warn / total * 100
                host['percent_crit'] = crit / total * 100
                host['percent_unknown'] = unknown / total * 100
                host['percent_pending'] = pending / total * 100
            except ZeroDivisionError:
                # Host without services: no meaningful health percentage
                host['health'] = 'n/a'
        except Exception:
            # Statistics are best-effort: hosts missing columns are
            # returned without the derived fields.
            pass
    # Sort by host and service status. Both sorts are stable, so state is
    # the primary key and num_problems the secondary key.
    result.sort(reverse=True, cmp=lambda a, b: cmp(a.get('num_problems'), b.get('num_problems')))
    result.sort(reverse=True, cmp=lambda a, b: cmp(a.get('state'), b.get('state')))
    return result
def get_services(request=None, tags=None, fields=None, *args, **kwargs):
    """ Get a list of services from mk_livestatus.

    This is a wrapper around pynag.Parsers.mk_livestatus().query()

    Arguments:
        requests - Not in use
        tags - List of 'tags' that will be passed on as a filter to the services.
               Example of service tags are: problem, unhandled, ishandled,
        fields - If fields=None, return all columns, otherwise return only the columns provided.
                 fields can be either a list or a comma seperated string

    Any *args will be passed directly to livestatus
    Any **kwargs passed in will be converted to livestatus 'Filter:' strings

    Examples:
        get_services(host_name='localhost') # same as livestatus.query('GET services','Filter: host_name = localhost')
        get_services('Authuser: admin', host_name='localhost')
    """
    # "q" is a free-text search filter, handled separately below
    if 'q' in kwargs:
        q = kwargs.get('q')
        del kwargs['q']
    else:
        q = []
    if not isinstance(q, list):
        q = [q]
    # If keyword "unhandled" is in kwargs, then we will fetch unhandled
    # services only: problem state, unacknowledged, no (host) downtime,
    # and the host itself is up
    if 'unhandled' in kwargs:
        del kwargs['unhandled']
        kwargs['state__isnot'] = 0
        kwargs['acknowledged'] = 0
        kwargs['scheduled_downtime_depth'] = 0
        kwargs['host_scheduled_downtime_depth'] = 0
        kwargs['host_acknowledged'] = 0
        kwargs['host_state'] = 0
    arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
    # If q was added, it is a fuzzy filter on services
    for i in q:
        arguments.append('Filter: host_name ~~ %s' % i)
        arguments.append('Filter: description ~~ %s' % i)
        arguments.append('Filter: plugin_output ~~ %s' % i)
        arguments.append('Filter: host_address ~~ %s' % i)
        # 'Or: 4' combines the four filter lines above
        arguments.append('Or: 4')
    if fields is None:
        fields = [
            'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state', 'scheduled_downtime_depth',
            'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
    # fields should be a list, lets create a Column: query for livestatus
    if isinstance(fields, (str, unicode)):
        fields = fields.split(',')
    if len(fields) > 0:
        argument = 'Columns: %s' % (' '.join(fields))
        arguments.append(argument)
    l = livestatus(request)
    result = l.get_services(*arguments)
    # Add custom tags to our service list
    try:
        for service in result:
            # Tag the service with tags such as problems and unhandled
            service_tags = []
            if service['state'] != 0:
                service_tags.append('problem')
                service_tags.append('problems')
                if service['acknowledged'] == 0 and service['downtimes'] == [] and service['host_downtimes'] == []:
                    service_tags.append('unhandled')
                    service['unhandled'] = "unhandled"
                else:
                    service_tags.append('ishandled')
                    service['handled'] = "handled"
            elif service.get('last_state_change') == 0:
                # Never checked: report as pending with unknown state
                service['state'] = 3
                service_tags.append('pending')
            else:
                service_tags.append('ok')
            if service['acknowledged'] == 1:
                service_tags.append('acknowledged')
            if service['downtimes'] != []:
                service_tags.append('downtime')
            service['tags'] = ' '.join(service_tags)
            service['status'] = state[service['state']]
        if isinstance(tags, str):
            tags = [tags]
        if isinstance(tags, list):
            # Keep only services whose tag string contains all requested tags
            result = pynag.Utils.grep(result, tags__contains=tags)
    except Exception:
        # Tagging is best-effort; never fail the whole listing over it
        pass
    return result
def get_contacts(request, *args, **kwargs):
    """ Get a list of contacts from mk_livestatus, using an
    authorization-aware connection.
    """
    return livestatus(request).get_contacts(*args, **kwargs)
def get_contactgroups(request, *args, **kwargs):
    """ Get a list of contactgroups from mk_livestatus, using an
    authorization-aware connection.
    """
    return livestatus(request).get_contactgroups(*args, **kwargs)
def get_statistics(request, *args, **kwargs):
    """ Return a dict with various statistics from mk_livestatus, such as
    service totals and host totals.

    Any *args/**kwargs are converted to livestatus filter lines and
    restrict the statistics to the matching objects.
    """
    c = {}
    l = livestatus(request)
    arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
    # Get service totals as an array of [ok,warn,crit,unknown]
    c['service_totals'] = l.get_services(
        'Stats: state = 0',
        'Stats: state = 1',
        'Stats: state = 2',
        'Stats: state = 3',
        *arguments
    ) or [0, 0, 0, 0]
    # Get host totals as an array of [up,down,unreachable]
    c['host_totals'] = l.get_hosts(
        'Stats: state = 0',
        'Stats: state = 1',
        'Stats: state = 2',
        *arguments
    ) or [0, 0, 0]
    # Get total number of host/ host_problems
    c['total_hosts'] = sum(c['host_totals'])
    c['total_host_problems'] = c['total_hosts'] - c['host_totals'][0]
    # Get total number of services/ service_problems
    c['total_services'] = sum(c['service_totals'])
    c['total_service_problems'] = c['total_services'] - c['service_totals'][0]
    # Calculate percentage of hosts/services that are "ok"
    try:
        c['service_totals_percent'] = map(lambda x: float(100.0 * x / c['total_services']), c['service_totals'])
    except ZeroDivisionError:
        c['service_totals_percent'] = [0, 0, 0, 0]
    try:
        c['host_totals_percent'] = map(lambda x: float(100.0 * x / c['total_hosts']), c['host_totals'])
    except ZeroDivisionError:
        # Bugfix: hosts only have three states (up/down/unreachable);
        # this fallback used to contain four zeros, unlike host_totals.
        c['host_totals_percent'] = [0, 0, 0]
    # Unhandled problems: in trouble, not acknowledged, not in downtime
    unhandled_services = l.get_services(
        'Stats: state > 0',
        acknowledged=0,
        scheduled_downtime_depth=0,
        host_state=0,
        *arguments
    ) or [0]
    unhandled_hosts = l.get_hosts(
        'Stats: state = 1',
        acknowledged=0,
        scheduled_downtime_depth=0,
        *arguments
    ) or [0]
    c['unhandled_services'] = unhandled_services[0]
    c['unhandled_hosts'] = unhandled_hosts[0]
    # Network problems: hosts that have children ('Filter: childs != ')
    # and are themselves down
    total_unhandled_network_problems = l.get_hosts(
        'Filter: childs != ',
        'Stats: state = 1',
        acknowledged=0,
        scheduled_downtime_depth=0,
        *arguments
    ) or [0]
    c['total_unhandled_network_problems'] = total_unhandled_network_problems[0]
    tmp = l.get_hosts(
        'Filter: childs != ',
        'Stats: state >= 0',
        'Stats: state > 0',
        *arguments
    ) or [0, 0]
    c['total_network_parents'], c['total_network_problems'] = tmp
    return c
def grep_to_livestatus(object_type, *args, **kwargs):
    """ Take querystring parameters from a django request object and return
    a list of livestatus query lines.

    Supports both hosts and services, with minimal support for views that
    show hosts and services together and receive parameters meant for both.

    Arguments:
        object_type -- either 'host' or 'service'; decides which keys apply
        *args       -- already-formatted livestatus lines, passed through
        **kwargs    -- querystring parameters (a QueryDict or plain dict)
    Returns:
        list of livestatus query lines
    """
    result = []
    for key in kwargs:
        # Django QueryDicts can carry multiple values per key
        if hasattr(kwargs, 'getlist'):
            values = kwargs.getlist(key)
        else:
            values = [kwargs.get(key)]
        if object_type == 'host' and key.startswith('service_'):
            continue
        if object_type == 'host' and key == 'description':
            continue
        if object_type == 'host' and key in ('host_scheduled_downtime_depth', 'host_acknowledged', 'host_state'):
            key = key[len('host_'):]
        if object_type == 'service' and key in ('service_state', 'service_description'):
            key = key[len('service_'):]
        if object_type == 'service' and key == 'unhandled':
            # "unhandled" is shorthand for: problem state, unacknowledged,
            # no (host) downtime, and the host itself is up.
            tmp = {}
            tmp['state__isnot'] = 0
            tmp['acknowledged'] = 0
            tmp['scheduled_downtime_depth'] = 0
            tmp['host_scheduled_downtime_depth'] = 0
            tmp['host_acknowledged'] = 0
            tmp['host_state'] = 0
            # Bugfix: this used to pass **kwargs, re-processing every
            # querystring key (including 'unhandled' itself) instead of
            # using the filter dict built above.
            result += pynag.Utils.grep_to_livestatus(**tmp)
        elif object_type == 'host' and key == 'unhandled':
            tmp = {}
            tmp['state__isnot'] = 0
            tmp['acknowledged'] = 0
            tmp['scheduled_downtime_depth'] = 0
            # Bugfix: these host filters were built but never appended to
            # the result at all.
            result += pynag.Utils.grep_to_livestatus(**tmp)
        elif object_type == 'host' and key == 'q':
            for i in values:
                result.append('Filter: name ~~ %s' % i)
                result.append('Filter: address ~~ %s' % i)
                result.append('Filter: plugin_output ~~ %s' % i)
                result.append('Or: 3')
        elif object_type == 'service' and key == 'q':
            for i in values:
                result.append('Filter: host_name ~~ %s' % i)
                result.append('Filter: description ~~ %s' % i)
                result.append('Filter: plugin_output ~~ %s' % i)
                result.append('Filter: host_address ~~ %s' % i)
                result.append('Or: 4')
        else:
            for value in values:
                result += pynag.Utils.grep_to_livestatus(**{key: value})
    return list(args) + result
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.http import HttpResponse
import time
from os.path import dirname
from collections import defaultdict
import json
import traceback
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.utils.encoding import smart_str
from django.core.context_processors import csrf
from django.utils.translation import ugettext as _
import pynag.Model
import pynag.Utils
import pynag.Control
import pynag.Plugins
import pynag.Model.EventHandlers
from pynag.Parsers import ParserError
import adagios.settings
from adagios.pnp.functions import run_pnp
from adagios.status import utils
import adagios.status.rest
import adagios.status.forms
import adagios.businessprocess
from django.core.urlresolvers import reverse
from adagios.status import graphite
# Map numeric nagios states to human readable strings; any state not
# listed below (e.g. 3) resolves to "unknown" via the defaultdict factory.
state = defaultdict(lambda: "unknown")
state[0] = "ok"
state[1] = "warning"
state[2] = "critical"
from adagios.views import adagios_decorator, error_page
@adagios_decorator
def detail(request):
    """ Return status detail view for a single given host, hostgroup,service, contact, etc """
    params = request.GET
    host_name = params.get('host_name')
    service_description = params.get('service_description')
    # Dispatch on the first querystring parameter present; a service
    # implies a host, so it is checked before host_name alone.
    if service_description:
        return service_detail(request, host_name=host_name, service_description=service_description)
    if host_name:
        return host_detail(request, host_name=host_name)
    contact_name = params.get('contact_name')
    if contact_name:
        return contact_detail(request, contact_name=contact_name)
    contactgroup_name = params.get('contactgroup_name')
    if contactgroup_name:
        return contactgroup_detail(request, contactgroup_name=contactgroup_name)
    hostgroup_name = params.get('hostgroup_name')
    if hostgroup_name:
        return hostgroup_detail(request, hostgroup_name=hostgroup_name)
    servicegroup_name = params.get('servicegroup_name')
    if servicegroup_name:
        return servicegroup_detail(request, servicegroup_name=servicegroup_name)
    raise Exception(_("You have to provide an item via querystring so we know what to give you details for"))
@adagios_decorator
def status_parents(request):
    """ Here for backwards compatibility

    Deprecated alias that delegates straight to network_parents().
    """
    return network_parents(request)
@adagios_decorator
def network_parents(request):
    """ List of hosts that are network parents """
    c = {}
    c['messages'] = []
    # NOTE(review): authuser and livestatus below are computed but never
    # used in this view — candidates for removal.
    authuser = request.GET.get('contact_name', None)
    livestatus = utils.livestatus(request)
    fields = "name childs state scheduled_downtime_depth address last_check last_state_change acknowledged downtimes services services_with_info".split()
    # 'Filter: childs !=' keeps only hosts that have at least one child
    hosts = utils.get_hosts(request, 'Filter: childs !=', fields=fields, **request.GET)
    # Index hosts by name for quick child lookups below
    host_dict = {}
    map(lambda x: host_dict.__setitem__(x['name'], x), hosts)
    c['hosts'] = []
    for i in hosts:
        if i['childs']:
            c['hosts'].append(i)
            ok = 0
            crit = 0
            i['child_hosts'] = []
            for x in i['childs']:
                i['child_hosts'].append(host_dict[x])
                if host_dict[x]['state'] == 0:
                    ok += 1
                else:
                    crit += 1
            # total > 0 is guaranteed by the i['childs'] check above
            total = float(len(i['childs']))
            i['health'] = float(ok) / total * 100.0
            i['percent_ok'] = ok / total * 100
            i['percent_crit'] = crit / total * 100
    return render_to_response('status_parents.html', c, context_instance=RequestContext(request))
@adagios_decorator
def status(request):
    """ Compatibility layer around status.views.services

    Old URLs pointing at 'status' end up in the services list view.
    """
    # return render_to_response('status.html', c, context_instance=RequestContext(request))
    # Left here for compatibility reasons:
    return services(request)
@adagios_decorator
def services(request):
    """ Render the service list view.

    The querystring is forwarded to utils.get_services() as a search filter.
    """
    fields = [
        'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state',
        'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
    context = {
        'messages': [],
        'errors': [],
        'services': utils.get_services(request, fields=fields, **request.GET),
    }
    return render_to_response('status_services.html', context, context_instance=RequestContext(request))
@adagios_decorator
def services_js(request):
    """ Render the service list as JSON embedded in a template.

    Same data as services(), but serialized with json.dumps for client-side use.
    """
    fields = [
        'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state',
        'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
    service_list = utils.get_services(request, fields=fields, **request.GET)
    context = {
        'messages': [],
        'errors': [],
        'services': json.dumps(service_list),
    }
    return render_to_response('status_services_js.html', context, context_instance=RequestContext(request))
@adagios_decorator
def status_dt(request):
    """ Render the status_dt.html view (no extra context is supplied). """
    context = {}
    return render_to_response('status_dt.html', context, context_instance=RequestContext(request))
@adagios_decorator
def snippets_services(request):
    """ Return a html stub containing only the service list. """
    fields = [
        'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state',
        'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
    context = {
        'messages': [],
        'errors': [],
        'services': utils.get_services(request, fields=fields, **request.GET),
    }
    return render_to_response('snippets/status_servicelist_snippet.html', context, context_instance=RequestContext(request))
@adagios_decorator
def snippets_hosts(request):
    """ Return a html stub containing only the host list.

    'detail' in the querystring selects which host is highlighted.
    """
    context = {
        'messages': [],
        'errors': [],
        'hosts': utils.get_hosts(request, **request.GET),
        'host_name': request.GET.get('detail', None),
    }
    return render_to_response('snippets/status_hostlist_snippet.html', context, context_instance=RequestContext(request))
@adagios_decorator
def snippets_log(request):
    """ Returns a html stub with the snippet_statehistory_snippet.html

    Querystring parameters:
      host_name           -- limit state history to one host
      service_description -- limit to one service ('_HOST_' means the host itself)
      hostgroup_name      -- limit to all hosts in one hostgroup

    Raises an exception if neither host_name nor hostgroup_name is given.
    """
    host_name = request.GET.get('host_name')
    service_description = request.GET.get('service_description')
    hostgroup_name = request.GET.get('hostgroup_name')
    # '_HOST_' is a magic value meaning "no particular service, just the host"
    if service_description == "_HOST_":
        service_description = None
    l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
    log = l.get_state_history(host_name=host_name, service_description=service_description)
    # If hostgroup_name was specified, lets get all log entries that belong to that hostgroup
    if host_name and service_description:
        object_type = 'service'
    elif hostgroup_name:
        object_type = "hostgroup"
        hg = pynag.Model.Hostgroup.objects.get_by_shortname(hostgroup_name)
        hosts = hg.get_effective_hosts()
        hostnames = map(lambda x: x.host_name, hosts)
        log = filter(lambda x: x['host_name'] in hostnames, log)
    elif host_name:
        object_type = "host"
    else:
        raise Exception(_("Need either a host_name or hostgroup_name parameter"))
    c = {'log':log}
    c['object_type'] = object_type
    # Create some state history progress bar from our logs:
    if len(c['log']) > 0:
        log = c['log']
        c['start_time'] = start_time = log[0]['time']
        c['end_time'] = end_time = log[-1]['time']
        now = time.time()
        total_duration = now - start_time
        state_hist = []
        start = start_time
        last_item = None
        # Map nagios state number to a bootstrap css class for the progress bar
        css_hint = {}
        css_hint[0] = 'success'
        css_hint[1] = 'warning'
        css_hint[2] = 'danger'
        css_hint[3] = 'unknown'
        for i in log:
            # 'duration' is provided by get_state_history for every entry
            i['duration_percent'] = 100 * i['duration'] / total_duration
            i['bootstrap_status'] = css_hint[i['state']]
    # NOTE: locals() is passed to the template, so everything above is visible there
    return render_to_response('snippets/status_statehistory_snippet.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def host_detail(request, host_name):
    """ Return status detail view for a single host.

    Implemented as the service detail view with no service selected.
    """
    return service_detail(request, host_name=host_name, service_description=None)
@adagios_decorator
def service_detail(request, host_name, service_description):
    """ Displays status details for one host or service

    Args:
        host_name: name of the host to display.
        service_description: which service to display; None means show the
            host itself (unless 'service_description' is in the querystring,
            in which case we recurse with that value).
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    livestatus = utils.livestatus(request)
    backend = request.GET.get('backend')
    c['pnp_url'] = adagios.settings.pnp_url
    c['nagios_url'] = adagios.settings.nagios_url
    c['request'] = request
    now = time.time()
    seconds_in_a_day = 60 * 60 * 24
    seconds_passed_today = now % seconds_in_a_day
    today = now - seconds_passed_today # midnight of today
    try:
        c['host'] = my_host = livestatus.get_host(host_name, backend)
        my_host['object_type'] = 'host'
        my_host['short_name'] = my_host['name']
    except IndexError:
        c['errors'].append(_("Could not find any host named '%s'") % host_name)
        return error_page(request, c)
    if service_description is None:
        # No service selected; maybe one was passed via querystring
        tmp = request.GET.get('service_description')
        if tmp is not None:
            return service_detail(request, host_name, service_description=tmp)
        primary_object = my_host
        c['service_description'] = '_HOST_'
        #c['log'] = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config).get_state_history(
        #    host_name=host_name, service_description=None)
    else:
        try:
            c['service'] = my_service = livestatus.get_service(
                host_name, service_description, backend=backend)
            my_service['object_type'] = 'service'
            c['service_description'] = service_description
            my_service['short_name'] = "%s/%s" % (
                my_service['host_name'], my_service['description'])
            primary_object = my_service
            #c['log'] = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config).get_state_history(
            #    host_name=host_name, service_description=service_description)
        except IndexError:
            c['errors'].append(
                _("Could not find any service named '%s'") % service_description)
            return error_page(request, c)
    c['my_object'] = primary_object
    c['object_type'] = primary_object['object_type']
    # Friendly statusname (i.e. turn 2 into "critical")
    primary_object['status'] = state[primary_object['state']]
    # Plugin longoutput comes to us with special characters escaped. lets undo
    # that:
    primary_object['long_plugin_output'] = primary_object[
        'long_plugin_output'].replace('\\n', '\n')
    # Service list on the sidebar should be sorted
    my_host['services_with_info'] = sorted(
        my_host.get('services_with_info', []))
    c['host_name'] = host_name
    # Parse performance data and attach a friendly status to each metric
    perfdata = primary_object['perf_data']
    perfdata = pynag.Utils.PerfData(perfdata)
    for i, datum in enumerate(perfdata.metrics):
        datum.i = i
        try:
            datum.status = state[datum.get_status()]
        except pynag.Utils.PynagError:
            # get_status() failed (e.g. no thresholds); show "unknown"
            datum.status = state[3]
    c['perfdata'] = perfdata.metrics
    # Get a complete list of network parents
    try:
        c['network_parents'] = reversed(_get_network_parents(request, host_name))
    except Exception, e:
        c['errors'].append(e)
    # Lets get some graphs
    try:
        tmp = run_pnp("json", host=host_name)
        tmp = json.loads(tmp)
    except Exception, e:
        # pnp4nagios failure is non-fatal; templates can show the error
        tmp = []
        c['pnp4nagios_error'] = e
    c['graph_urls'] = tmp
    if adagios.settings.enable_graphite:
        metrics = [x.label for x in perfdata.metrics]
        service = c['service_description'].replace(' ', '_')
        c['graphite'] = graphite.get(adagios.settings.graphite_url,
                                     c['host_name'],
                                     service,
                                     metrics,
                                     adagios.settings.GRAPHITE_PERIODS,
                                     )
        # used in the General tab - preview
        for graph in c['graphite']:
            if graph['css_id'] == adagios.settings.GRAPHITE_DEFAULT_TAB:
                default = {}
                for k,v in graph['metrics'].items():
                    default[k] = v
                c['graphite_default'] = default
    return render_to_response('status_detail.html', c, context_instance=RequestContext(request))
def _get_network_parents(request, host_name):
    """ Returns a list of hosts that are network parents (or grandparents) to host_name

    Every item in the list is a host dictionary from mk_livestatus

    Args:
        request: django request; its 'backend' querystring parameter selects
            the livestatus backend.
        host_name: name of the host (str/unicode) or a host dict from livestatus.

    Returns:
        List of lists

    Example:
        _get_network_parents('remotehost.example.com')
        [
            ['gateway.example.com', 'mod_gearman.example.com'],
            ['localhost'],
        ]
    """
    result = []
    backend = request.GET.get('backend', None)
    livestatus = adagios.status.utils.livestatus(request)
    # Normalize unicode to str so livestatus lookups work consistently (py2)
    if isinstance(host_name, unicode):
        host_name = smart_str(host_name)
    if isinstance(host_name, str):
        host = livestatus.get_host(host_name, backend)
    elif isinstance(host_name, dict):
        host = host_name
    else:
        raise KeyError(
            'host_name must be str or dict (got %s)' % type(host_name))
    parent_names = host['parents']
    # Walk up the tree one generation at a time until no parents remain.
    # NOTE(review): a cyclic parent definition would loop forever - assumes configs are acyclic
    while len(parent_names) > 0:
        parents = map(lambda x: livestatus.get_host(x, backend), parent_names)
        # generate a list of grandparent names:
        grand_parents = set()
        for i in parents:
            map(lambda x: grand_parents.add(x), i.get('parents'))
        result.append(parents)
        parent_names = list(grand_parents)
    return result
@adagios_decorator
def hostgroup_detail(request, hostgroup_name):
    """ Status detail view for one specific hostgroup.

    Shows the hostgroup itself plus its direct member hostgroups, all
    annotated with service statistics.
    """
    context = {
        'messages': [],
        'errors': [],
        'hostgroup_name': hostgroup_name,
        'object_type': 'hostgroup',
    }
    livestatus = adagios.status.utils.livestatus(request)
    my_hostgroup = pynag.Model.Hostgroup.objects.get_by_shortname(hostgroup_name)
    context['my_hostgroup'] = livestatus.get_hostgroups('Filter: name = %s' % hostgroup_name)[0]
    _add_statistics_to_hostgroups([context['my_hostgroup']])
    # Child hostgroups live in the comma-separated hostgroup_members attribute
    member_string = my_hostgroup.hostgroup_members or ''
    member_names = member_string.split(',') if member_string else []
    context['hostgroups'] = [livestatus.get_hostgroups('Filter: name = %s' % name)[0] for name in member_names]
    _add_statistics_to_hostgroups(context['hostgroups'])
    return render_to_response('status_hostgroup.html', context, context_instance=RequestContext(request))
def _add_statistics_to_hostgroups(hostgroups):
    """ Enriches a list of hostgroup dicts with information about subgroups and parentgroups

    For every hostgroup dict (from livestatus) the following keys are added:
      child_hostgroups / parent_hostgroups -- related hostgroup names
      total / problems                     -- aggregate service counters
      health, percent_ok, percent_warn, percent_crit, percent_unknown,
      percent_pending                      -- ratios used by the templates

    Hostgroups with zero services keep their health/percent fields unset.
    """
    # Lets establish a good list of all hostgroups and parentgroups
    all_hostgroups = pynag.Model.Hostgroup.objects.all
    all_subgroups = set()  # all hostgroups that belong in some other hostgroup
    # Maps "subgroup" -> set of its parent hostgroup names
    hostgroup_parentgroups = defaultdict(set)
    hostgroup_childgroups = pynag.Model.ObjectRelations.hostgroup_hostgroups
    for hostgroup, subgroups in hostgroup_childgroups.items():
        # Plain loop instead of side-effecting map() (also py3-safe)
        for subgroup in subgroups:
            hostgroup_parentgroups[subgroup].add(hostgroup)
    for i in hostgroups:
        i['child_hostgroups'] = hostgroup_childgroups[i['name']]
        i['parent_hostgroups'] = hostgroup_parentgroups[i['name']]
    # Extra statistics for our hostgroups
    for hg in hostgroups:
        ok = hg.get('num_services_ok')
        warn = hg.get('num_services_warn')
        crit = hg.get('num_services_crit')
        pending = hg.get('num_services_pending')
        unknown = hg.get('num_services_unknown')
        total = ok + warn + crit + pending + unknown
        hg['total'] = total
        hg['problems'] = warn + crit + unknown
        try:
            total = float(total)
            hg['health'] = float(ok) / total * 100.0
            hg['percent_ok'] = ok / total * 100
            hg['percent_warn'] = warn / total * 100
            hg['percent_crit'] = crit / total * 100
            hg['percent_unknown'] = unknown / total * 100
            hg['percent_pending'] = pending / total * 100
        except ZeroDivisionError:
            # Hostgroup has no services; leave percentages unset
            pass
@adagios_decorator
def status_servicegroups(request):
    """ Render the list of all servicegroups. """
    livestatus = utils.livestatus(request)
    context = {
        'messages': [],
        'errors': [],
        'servicegroup_name': None,
        'request': request,
        'servicegroups': livestatus.get_servicegroups(),
    }
    return render_to_response('status_servicegroups.html', context, context_instance=RequestContext(request))
@adagios_decorator
def status_hostgroups(request):
    """ Render the hostgroup overview.

    With no hostgroup selected (hostgroup_name is currently always None),
    only "root" hostgroups -- those that are not members of any other
    hostgroup -- are listed. Every host and hostgroup is annotated with
    service statistics (total/problems/health/percent_*).
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    hostgroup_name = None
    livestatus = utils.livestatus(request)
    hostgroups = livestatus.get_hostgroups()
    c['hostgroup_name'] = hostgroup_name
    c['request'] = request
    # Lets establish a good list of all hostgroups and parentgroups
    all_hostgroups = pynag.Model.Hostgroup.objects.all
    all_subgroups = set()  # all hostgroups that belong in some other hostgroup
    # Maps "subgroup" -> set of its parent hostgroup names
    hostgroup_parentgroups = defaultdict(set)
    hostgroup_childgroups = pynag.Model.ObjectRelations.hostgroup_hostgroups
    for hostgroup, subgroups in hostgroup_childgroups.items():
        # Plain loop instead of side-effecting map() (also py3-safe)
        for subgroup in subgroups:
            hostgroup_parentgroups[subgroup].add(hostgroup)
    for i in hostgroups:
        i['child_hostgroups'] = hostgroup_childgroups[i['name']]
        i['parent_hostgroups'] = hostgroup_parentgroups[i['name']]
    if hostgroup_name is None:
        # If no hostgroup was specified. Lets only show "root hostgroups"
        c['hosts'] = livestatus.get_hosts()
        my_hostgroups = []
        for i in hostgroups:
            if len(i['parent_hostgroups']) == 0:
                my_hostgroups.append(i)
        my_hostgroups.sort()
        c['hostgroups'] = my_hostgroups
    else:
        my_hostgroup = pynag.Model.Hostgroup.objects.get_by_shortname(
            hostgroup_name)
        subgroups = my_hostgroup.hostgroup_members or ''
        subgroups = subgroups.split(',')
        # Strip out any group that is not a subgroup of hostgroup_name
        right_hostgroups = []
        for group in hostgroups:
            if group.get('name', '') in subgroups:
                right_hostgroups.append(group)
        c['hostgroups'] = right_hostgroups
        # If a hostgroup was specified lets also get all the hosts for it
        c['hosts'] = livestatus.query(
            'GET hosts', 'Filter: host_groups >= %s' % hostgroup_name)
    # Annotate every host with service statistics for the templates
    for host in c['hosts']:
        ok = host.get('num_services_ok')
        warn = host.get('num_services_warn')
        crit = host.get('num_services_crit')
        pending = host.get('num_services_pending')
        unknown = host.get('num_services_unknown')
        total = ok + warn + crit + pending + unknown
        host['total'] = total
        host['problems'] = warn + crit + unknown
        try:
            total = float(total)
            host['health'] = float(ok) / total * 100.0
            host['percent_ok'] = ok / total * 100
            host['percent_warn'] = warn / total * 100
            host['percent_crit'] = crit / total * 100
            host['percent_unknown'] = unknown / total * 100
            host['percent_pending'] = pending / total * 100
        except ZeroDivisionError:
            host['health'] = 'n/a'
    # Extra statistics for our hostgroups
    for hg in c['hostgroups']:
        ok = hg.get('num_services_ok')
        warn = hg.get('num_services_warn')
        crit = hg.get('num_services_crit')
        pending = hg.get('num_services_pending')
        unknown = hg.get('num_services_unknown')
        total = ok + warn + crit + pending + unknown
        hg['total'] = total
        hg['problems'] = warn + crit + unknown
        try:
            total = float(total)
            hg['health'] = float(ok) / total * 100.0
            hg['percent_ok'] = ok / total * 100
            hg['percent_warn'] = warn / total * 100
            hg['percent_crit'] = crit / total * 100
            hg['percent_unknown'] = unknown / total * 100
            hg['percent_pending'] = pending / total * 100
        except ZeroDivisionError:
            # Hostgroup has no services; leave percentages unset
            pass
    return render_to_response('status_hostgroups.html', c, context_instance=RequestContext(request))
@adagios_decorator
def status_host(request):
    """ Deprecated alias for hosts(), kept for backwards compatibility. """
    return hosts(request)
@adagios_decorator
def hosts(request):
    """ Render the host list view.

    'detail' in the querystring selects which host is highlighted.
    """
    context = {
        'messages': [],
        'errors': [],
        'hosts': utils.get_hosts(request, **request.GET),
        'host_name': request.GET.get('detail', None),
    }
    return render_to_response('status_host.html', context, context_instance=RequestContext(request))
@adagios_decorator
def problems(request):
    """ Render a combined list of host and service problems. """
    search = request.GET.copy()
    # Default to non-OK states unless the caller filters on state explicitly
    if 'state__isnot' not in search and 'state' not in search:
        search['state__isnot'] = '0'
    context = {
        'messages': [],
        'errors': [],
        'hosts': utils.get_hosts(request, **search),
        'services': utils.get_services(request, **search),
    }
    return render_to_response('status_problems.html', context, context_instance=RequestContext(request))
def get_related_objects(object_id):
    """ Return a list of pynag objects related to the object with the given id.

    Templates (register == '0') yield their effective children; hostgroups
    yield their member hostgroups and hosts; contactgroups their member
    contactgroups and contacts; hosts their network children and services.
    """
    obj = pynag.Model.ObjectDefinition.objects.get_by_id(object_id)
    related = []
    if obj.register == '0':
        related.extend(obj.get_effective_children())
        return related
    if obj.object_type == 'hostgroup':
        related.extend(obj.get_effective_hostgroups())
        related.extend(obj.get_effective_hosts())
    if obj.object_type == 'contactgroup':
        related.extend(obj.get_effective_contactgroups())
        related.extend(obj.get_effective_contacts())
    if obj.object_type == 'host':
        related.extend(obj.get_effective_network_children())
        related.extend(obj.get_effective_services())
    return related
def _add_statistics_to_hosts(hosts):
""" Takes a list of dict hosts, and adds to the list statistics
Following is an example of attributes added to the dicts:
num_services_ok
num_services_warn
problems (number of problems)
health (percent of services ok)
percent_problems
"""
for host in hosts:
ok = host.get('num_services_ok')
warn = host.get('num_services_warn')
crit = host.get('num_services_crit')
pending = host.get('num_services_pending')
unknown = host.get('num_services_unknown')
total = ok + warn + crit + pending + unknown
host['total'] = total
host['problems'] = warn + crit + unknown
host['num_problems'] = warn + crit + unknown
try:
total = float(total)
host['health'] = float(ok) / total * 100.0
host['percent_ok'] = ok / total * 100
host['percent_warn'] = warn / total * 100
host['percent_crit'] = crit / total * 100
host['percent_unknown'] = unknown / total * 100
host['percent_pending'] = pending / total * 100
except ZeroDivisionError:
host['health'] = 'n/a'
host['percent_ok'] = 0
host['percent_warn'] = 0
host['percent_crit'] = 0
host['percent_unknown'] = 0
host['percent_pending'] = 0
@adagios_decorator
def status_index(request):
    """ Front page of the status interface.

    Shows overall statistics plus the list of unhandled service problems.
    """
    c = adagios.status.utils.get_statistics(request)
    c['services'] = adagios.status.utils.get_services(request, 'unhandled')
    #c['top_alert_producers'] = adagios.status.rest.top_alert_producers(limit=5)
    return render_to_response('status_index.html', c, context_instance=RequestContext(request))
@adagios_decorator
def test_livestatus(request):
    """ This view is a test on top of mk_livestatus which allows you to enter your own queries

    Querystring parameters:
      table          -- livestatus table to query; when given, its columns are listed
      check_<column> -- include <column> in the result set
      limit          -- maximum number of rows to return
      submit         -- when present, the query is actually executed
    """
    c = {}
    c['messages'] = []
    c['table'] = table = request.GET.get('table')
    livestatus = adagios.status.utils.livestatus(request)
    if table is not None:
        # List all columns available in the chosen table
        columns = livestatus.query('GET columns', 'Filter: table = %s' % table)
        c['columns'] = columns
        columns = ""
        limit = request.GET.get('limit')
        run_query = False
        for k, v in request.GET.items():
            if k == "submit":
                run_query = True
            if k.startswith('check_'):
                columns += " " + k[len("check_"):]
        # Any columns checked means we return a query
        query = ['GET %s' % table]
        if len(columns) > 0:
            query.append("Columns: %s" % columns)
        # NOTE(review): under python2 any non-empty limit string compares > 0,
        # so this effectively means "limit was provided" -- confirm before porting
        if limit != '' and limit > 0:
            query.append("Limit: %s" % limit)
        if run_query is True:
            c['results'] = livestatus.query(*query)
            c['query'] = livestatus.last_query
            # Guard against an empty result set (previously raised IndexError)
            if c['results']:
                c['header'] = c['results'][0].keys()
            else:
                c['header'] = []
    return render_to_response('test_livestatus.html', c, context_instance=RequestContext(request))
def _status_combined(request, optimized=False):
    """ Returns a combined status of network outages, host problems and service problems

    If optimized is True, fewer attributes are loaded it, makes it run faster but with less data

    Returns:
        A context dict with keys: network_problems, host_problems,
        service_problems, hosts, services, parents, service_status
        and host_status (the last two are per-state percentages, or 0
        when there is nothing to count).
    """
    c = {}
    livestatus = adagios.status.utils.livestatus(request)
    if optimized == True:
        hosts = livestatus.get_hosts(
            'Columns: name state acknowledged downtimes childs parents')
        services = livestatus.get_services(
            'Columns: host_name description state acknowledged downtimes host_state')
    else:
        hosts = livestatus.get_hosts()
        services = livestatus.get_services()
    hosts_that_are_down = []
    hostnames_that_are_down = []
    # Counters indexed by nagios state number (0-3)
    service_status = [0, 0, 0, 0]
    host_status = [0, 0, 0, 0]
    parents = []
    for host in hosts:
        host_status[host["state"]] += 1
        # A host with children is a network parent
        if len(host['childs']) > 0:
            parents.append(host)
        # Only unacknowledged, non-downtimed problems count as "down"
        if host['state'] != 0 and host['acknowledged'] == 0 and host['downtimes'] == []:
            hostnames_that_are_down.append(host['name'])
            hosts_that_are_down.append(host)
    network_problems = []
    host_problems = []
    service_problems = []
    # Do nothing if host parent is also down.
    for host in hosts_that_are_down:
        for i in host['parents']:
            if i in hostnames_that_are_down:
                break
        else:
            # No parent of this host is down, so the problem is the host itself;
            # hosts with children are classified as network problems instead
            if len(host['childs']) == 0:
                host_problems.append(host)
            else:
                network_problems.append(host)
    for service in services:
        service_status[service["state"]] += 1
        if service['state'] != 0 and service['acknowledged'] == 0 and len(service['downtimes']) == 0 and service['host_state'] == 0:
            service_problems.append(service)
    c['network_problems'] = network_problems
    c['host_problems'] = host_problems
    c['service_problems'] = service_problems
    c['hosts'] = hosts
    c['services'] = services
    c['parents'] = parents
    # Convert the state counters into percentages for the templates
    service_totals = float(sum(service_status))
    host_totals = float(sum(host_status))
    if service_totals == 0:
        c['service_status'] = 0
    else:
        c['service_status'] = map(
            lambda x: 100 * x / service_totals, service_status)
    if host_totals == 0:
        c['host_status'] = 0
    else:
        c['host_status'] = map(lambda x: 100 * x / host_totals, host_status)
    #l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
    #c['log'] = reversed(l.get_state_history())
    return c
@adagios_decorator
def status_problems(request):
    """ Deprecated alias for dashboard(), kept for backwards compatibility. """
    return dashboard(request)
@adagios_decorator
def dashboard(request):
    """ Render the status dashboard: statistics plus host and service problems. """
    context = adagios.status.utils.get_statistics(request)
    context['messages'] = []
    context['errors'] = []
    context['host_problems'] = utils.get_hosts(request, state='1', unhandled='', **request.GET)
    # Service problems on hosts that are themselves up
    context['service_problems'] = utils.get_services(request, host_state="0", unhandled='', **request.GET)
    # Order by state (worst first); last_check breaks ties (newest first).
    # Two stable sorts: the secondary key is applied first.
    context['service_problems'].sort(key=lambda svc: svc['last_check'], reverse=True)
    context['service_problems'].sort(key=lambda svc: svc['state'], reverse=True)
    return render_to_response('status_dashboard.html', context, context_instance=RequestContext(request))
@adagios_decorator
def state_history(request):
    """ Render the state history view.

    Optional querystring parameters:
      start_time / end_time -- unix timestamps delimiting the period
        (defaults: midnight of the end_time's day until now)
    Any remaining querystring parameters are used to grep the log.
    For every host/service pair the per-entry duration percentages,
    an SLA percentage and the number of problems are computed.
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    livestatus = adagios.status.utils.livestatus(request)
    start_time = request.GET.get('start_time', None)
    end_time = request.GET.get('end_time', None)
    if end_time is None:
        end_time = time.time()
    end_time = int(float(end_time))
    if start_time is None:
        seconds_in_a_day = 60 * 60 * 24
        seconds_today = end_time % seconds_in_a_day # midnight of today
        start_time = end_time - seconds_today
    start_time = int(start_time)
    l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
    c['log'] = log = l.get_state_history(start_time=start_time, end_time=end_time,strict=False)
    total_duration = end_time - start_time
    c['total_duration'] = total_duration
    # Map nagios state number to a bootstrap css class
    css_hint = {}
    css_hint[0] = 'success'
    css_hint[1] = 'warning'
    css_hint[2] = 'danger'
    css_hint[3] = 'info'
    last_item = None
    services = {}
    # Strip the timepicker widget fields; everything left is a search filter
    search_filter = request.GET.copy()
    search_filter.pop('start_time', None)
    search_filter.pop('end_time', None)
    search_filter.pop('start_time_picker', None)
    search_filter.pop('start_hours', None)
    search_filter.pop('end_time_picker', None)
    search_filter.pop('end_hours', None)
    search_filter.pop('submit', None)
    log = pynag.Utils.grep(log, **search_filter)
    # Group log entries per host/service pair and track the worst state seen
    for i in log:
        short_name = "%s/%s" % (i['host_name'], i['service_description'])
        if short_name not in services:
            s = {}
            s['host_name'] = i['host_name']
            s['service_description'] = i['service_description']
            s['log'] = []
            s['worst_logfile_state'] = 0
            #s['log'] = [{'time':start_time,'state':3, 'plugin_output':'Unknown value here'}]
            services[short_name] = s
        services[short_name]['log'].append(i)
        services[short_name]['worst_logfile_state'] = max(
            services[short_name]['worst_logfile_state'], i['state'])
    # Walk each service's log in order, computing duration and SLA figures;
    # each entry lasts until the next entry (or end_time for the last one)
    for service in services.values():
        last_item = None
        service['sla'] = float(0)
        service['num_problems'] = 0
        service['duration'] = 0
        for i in service['log']:
            i['bootstrap_status'] = css_hint[i['state']]
            # Clamp entries that started before the displayed period
            if i['time'] < start_time:
                i['time'] = start_time
            if last_item is not None:
                last_item['end_time'] = i['time']
                #last_item['time'] = max(last_item['time'], start_time)
                last_item['duration'] = duration = last_item[
                    'end_time'] - last_item['time']
                last_item['duration_percent'] = 100 * float(
                    duration) / total_duration
                service['duration'] += last_item['duration_percent']
                if last_item['state'] == 0:
                    service['sla'] += last_item['duration_percent']
                else:
                    service['num_problems'] += 1
            last_item = i
        # The final entry lasts until the end of the displayed period
        if not last_item is None:
            last_item['end_time'] = end_time
            last_item['duration'] = duration = last_item[
                'end_time'] - last_item['time']
            last_item['duration_percent'] = 100 * duration / total_duration
            service['duration'] += last_item['duration_percent']
            if last_item['state'] == 0:
                service['sla'] += last_item['duration_percent']
            else:
                service['num_problems'] += 1
    c['services'] = services
    c['start_time'] = start_time
    c['end_time'] = end_time
    return render_to_response('state_history.html', c, context_instance=RequestContext(request))
def _status_log(request):
    """ Helper function to any status view that requires log access

    Reads start_time, end_time, limit and arbitrary search parameters from
    the querystring, fetches matching log entries and returns a context dict
    containing 'log' (entries, reversed) and 'logs' (entries grouped by
    class_name, plus an 'all' group).
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    start_time = request.GET.get('start_time', '')
    end_time = request.GET.get('end_time', '')
    host_name = request.GET.get('host_name', '')
    service_description = request.GET.get('service_description', '')
    limit = request.GET.get('limit', '')
    if end_time == '':
        end_time = None
    else:
        end_time = float(end_time)
    if start_time == '':
        now = time.time()
        seconds_in_a_day = 60 * 60 * 24
        seconds_today = now % seconds_in_a_day # midnight of today
        start_time = now - seconds_today
    else:
        start_time = float(start_time)
    if limit == '':
        limit = 2000
    else:
        limit = int(limit)
    # Any querystring parameters we will treat as a search string to get_log_entries, but we need to massage them
    # a little bit first
    kwargs = {}
    for k, v in request.GET.items():
        if k == 'search':
            # NOTE(review): no-op assignment; its effect is that 'search' skips
            # the filters below (even an empty value is passed through) - confirm
            k = 'search'
        elif k in (
            'start_time', 'end_time', 'start_time_picker', 'end_time_picker', 'limit',
            'start_hours', 'end_hours'):
            continue
        elif v is None or len(v) == 0:
            continue
        k = str(k)
        v = str(v)
        kwargs[k] = v
    l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
    # Keep only the last 'limit' entries, then reverse so newest come first
    c['log'] = l.get_log_entries(
        start_time=start_time, end_time=end_time, **kwargs)[-limit:]
    c['log'].reverse()
    # Group entries by their class_name; 'all' contains every entry
    c['logs'] = {'all': []}
    for line in c['log']:
        if line['class_name'] not in c['logs'].keys():
            c['logs'][line['class_name']] = []
        c['logs'][line['class_name']].append(line)
        c['logs']['all'].append(line)
    c['start_time'] = start_time
    c['end_time'] = end_time
    return c
@adagios_decorator
def log(request):
    """ Render the event log view. """
    c = _status_log(request)
    c['request'] = request
    # _status_log() already reversed the log once; reversing again restores
    # the parser's original ordering of the (limited) entries
    c['log'].reverse()
    return render_to_response('status_log.html', c, context_instance=RequestContext(request))
@adagios_decorator
def comment_list(request):
    """ Display a list of all comments

    The querystring is converted into livestatus filters.
    """
    livestatus = adagios.status.utils.livestatus(request)
    filters = pynag.Utils.grep_to_livestatus(**request.GET)
    context = {
        'messages': [],
        'errors': [],
        'comments': livestatus.query('GET comments', *filters),
    }
    return render_to_response('status_comments.html', context, context_instance=RequestContext(request))
@adagios_decorator
def downtime_list(request):
    """ Display a list of all scheduled downtimes

    The querystring is converted into livestatus filters.
    """
    livestatus = adagios.status.utils.livestatus(request)
    filters = pynag.Utils.grep_to_livestatus(**request.GET)
    context = {
        'messages': [],
        'errors': [],
        'downtimes': livestatus.query('GET downtimes', *filters),
    }
    return render_to_response('status_downtimes.html', context, context_instance=RequestContext(request))
@adagios_decorator
def acknowledgement_list(request):
    """ Display a list of all acknowledgements

    Acknowledgements are comments with entry_type 4; the querystring is
    converted into additional livestatus filters.
    """
    livestatus = adagios.status.utils.livestatus(request)
    filters = pynag.Utils.grep_to_livestatus(**request.GET)
    context = {
        'messages': [],
        'errors': [],
        'acknowledgements': livestatus.query('GET comments', 'Filter: entry_type = 4', *filters),
    }
    return render_to_response('status_acknowledgements.html', context, context_instance=RequestContext(request))
@adagios_decorator
def perfdata(request):
    """ Display a table of performance metrics for services.

    Each service dict is annotated with 'metrics': its valid perfdata metrics.
    """
    fields = "host_name description perf_data state host_state scheduled_downtime_depth host_scheduled_downtime_depth host_acknowledged acknowledged downtimes host_downtimes".split()
    service_list = utils.get_services(request, fields=fields, **request.GET)
    for service in service_list:
        parsed = pynag.Utils.PerfData(service['perf_data'])
        service['metrics'] = [m for m in parsed.metrics if m.is_valid()]
    context = {
        'messages': [],
        'errors': [],
        'perfdata': service_list,
    }
    return render_to_response('status_perfdata.html', context, context_instance=RequestContext(request))
@adagios_decorator
def contact_list(request):
    """ Display a list of active contacts

    The querystring is forwarded to get_contacts() as a search filter.
    """
    context = {
        'messages': [],
        'errors': [],
        'contacts': adagios.status.utils.get_contacts(request, **request.GET),
    }
    return render_to_response('status_contacts.html', context, context_instance=RequestContext(request))
@adagios_decorator
def contact_detail(request, contact_name):
    """ Detailed information for one specific contact

    Args:
        contact_name: name of the contact to display.

    Raises:
        Exception if no contact with that name exists.
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    c['contact_name'] = contact_name
    l = adagios.status.utils.livestatus(request)
    backend = request.GET.get('backend', None)
    # Fetch contact and basic information
    try:
        contact = l.get_contact(contact_name, backend)
        c['contact'] = contact
    except IndexError:
        raise Exception("Contact named '%s' was not found." % contact_name)
    # Active comments
    c['comments'] = l.query(
        'GET comments', 'Filter: comment ~ %s' % contact_name,)
    for i in c['comments']:
        # type 1 is a host comment; otherwise it is a service comment
        if i.get('type') == 1:
            i['state'] = i['host_state']
        else:
            i['state'] = i['service_state']
    # Services this contact can see
    c['services'] = l.query(
        'GET services', "Filter: contacts >= %s" % contact_name)
    # Activity log
    c['log'] = pynag.Parsers.LogFiles(
        maincfg=adagios.settings.nagios_config).get_log_entries(search=str(contact_name))
    # Contact groups
    c['groups'] = l.query(
        'GET contactgroups', 'Filter: members >= %s' % contact_name)
    # Git audit logs
    nagiosdir = dirname(adagios.settings.nagios_config or pynag.Model.config.guess_cfg_file())
    git = pynag.Utils.GitRepo(directory=nagiosdir)
    c['gitlog'] = git.log(author_name=contact_name)
    return render_to_response('status_contact.html', c, context_instance=RequestContext(request))
@adagios_decorator
def map_view(request):
    """ Render all hosts on the map view. """
    livestatus = adagios.status.utils.livestatus(request)
    context = {
        'hosts': livestatus.get_hosts(),
        'map_center': adagios.settings.map_center,
        'map_zoom': adagios.settings.map_zoom,
    }
    return render_to_response('status_map.html', context, context_instance=RequestContext(request))
@adagios_decorator
def servicegroup_detail(request, servicegroup_name):
    """ Detailed information for one specific servicegroup

    Lists all services that belong to the given servicegroup; the remaining
    querystring is forwarded to get_services() as a search filter.
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    c['servicegroup_name'] = servicegroup_name
    search_conditions = request.GET.copy()
    # Use a default so direct URLs (without servicegroup_name in the
    # querystring) do not raise KeyError
    search_conditions.pop('servicegroup_name', None)
    c['services'] = adagios.status.utils.get_services(request, groups__has_field=servicegroup_name, **search_conditions)
    return render_to_response('status_servicegroup.html', c, context_instance=RequestContext(request))
@adagios_decorator
def contactgroups(request):
    """ Display a list of contactgroups

    The querystring is forwarded to get_contactgroups() as a search filter.
    """
    livestatus = adagios.status.utils.livestatus(request)
    context = {
        'messages': [],
        'errors': [],
        'contactgroups': livestatus.get_contactgroups(**request.GET),
    }
    return render_to_response('status_contactgroups.html', context, context_instance=RequestContext(request))
@adagios_decorator
def contactgroup_detail(request, contactgroup_name):
    """ Detailed information for one specific contactgroup

    Shows the contactgroup itself plus every host and service it is
    responsible for.
    """
    context = {
        'messages': [],
        'errors': [],
        'contactgroup_name': contactgroup_name,
    }
    livestatus = adagios.status.utils.livestatus(request)
    # Basic information about the contactgroup itself
    matches = livestatus.query("GET contactgroups", "Filter: name = %s" %
                               contactgroup_name)
    if matches == []:
        context['errors'].append(
            "Contactgroup named '%s' was not found." % contactgroup_name)
    else:
        context['contactgroup'] = matches[0]
    # Services this contactgroup is responsible for
    context['services'] = livestatus.query(
        'GET services', "Filter: contact_groups >= %s" % contactgroup_name)
    # Hosts this contactgroup is responsible for
    context['hosts'] = livestatus.query(
        'GET hosts', "Filter: contact_groups >= %s" % contactgroup_name)
    return render_to_response('status_contactgroup.html', context, context_instance=RequestContext(request))
@adagios_decorator
def perfdata2(request):
    """ Just a test method, feel free to remove it

    Shows selected performance metrics for services matched by the
    querystring. A 'metrics' querystring parameter selects which metric
    labels to display; without it, the labels found in the results are used.
    """
    c = {}
    c['messages'] = []
    c['errors'] = []
    columns = 'Columns: host_name description perf_data state host_state'
    l = adagios.status.utils.livestatus(request)
    # User can specify from querystring a filter of which services to fetch
    # we convert querystring into livestatus filters.
    # User can also specify specific metrics to watch, so we extract from
    # querystring as well
    querystring = request.GET.copy()
    # QueryDict.pop returns a list of values; take the first one
    interesting_metrics = querystring.pop('metrics', [''])[0].strip(',')
    arguments = pynag.Utils.grep_to_livestatus(**querystring)
    if not arguments:
        # Without any filter, show nothing rather than every service
        services = []
    else:
        services = l.query('GET services', columns, *arguments)
    # If no metrics= was specified on querystring, we take the string
    # from first service in our search result
    if not interesting_metrics and services:
        metric_set = set()
        for i in services:
            perfdata = pynag.Utils.PerfData(i.get('perf_data', ''))
            map(lambda x: metric_set.add(x.label), perfdata.metrics)
        interesting_metrics = sorted(list(metric_set))
    else:
        interesting_metrics = interesting_metrics.split(',')
    # Iterate through all the services and parse perfdata
    for service in services:
        perfdata = pynag.Utils.PerfData(service['perf_data'])
        # Metrics missing from a service are padded with an empty metric
        null_metric = pynag.Utils.PerfDataMetric()
        metrics = map(lambda x: perfdata.get_perfdatametric(
            x) or null_metric, interesting_metrics)
        #metrics = filter(lambda x: x.is_valid(), metrics)
        service['metrics'] = metrics
    c['metrics'] = interesting_metrics
    c['services'] = services
    return render_to_response('status_perfdata2.html', c, context_instance=RequestContext(request))
def acknowledge(request):
    """ Acknowledge host/service problems submitted via POST.

    NOTE(review): this function appears incomplete/truncated — it collects
    the acknowledgement parameters but never submits any external command
    and returns None; confirm against the upstream implementation.
    """
    if request.method != 'POST':
        raise Exception("Only use POST to this url")
    # Acknowledgement options as understood by Nagios ACKNOWLEDGE_*_PROBLEM.
    sticky = request.POST.get('sticky', 1)
    persistent = request.POST.get('persistent', 0)
    author = request.META.get('REMOTE_USER', 'anonymous')
    comment = request.POST.get('comment', 'acknowledged by Adagios')
    # Targets: list of host names and "host;service" identifiers.
    hostlist = request.POST.getlist('host', [])
    servicelist = request.POST.getlist('service', [])
@adagios_decorator
def status_hostgroup(request, hostgroup_name):
    """ Deprecated alias kept for backwards compatibility with old URLs;
    delegates to hostgroup_detail(). """
    return hostgroup_detail(request, hostgroup_name=hostgroup_name)
@adagios_decorator
def status_detail(request):
    """ Deprecated alias kept for backwards compatibility with old URLs;
    delegates to detail(). """
    return detail(request)
@adagios_decorator
def backends(request):
    """ Display a list of available backends and their connection status.

    Each backend is probed with test(raise_error=False) so a broken backend
    is reported on the page instead of raising. The template receives
    locals(), so `backends` (and `livestatus`) are available in the context.
    """
    livestatus = adagios.status.utils.livestatus(request)
    backends = livestatus.get_backends()
    # The dict key was previously unpacked but never used; iterate values only.
    for backend in backends.values():
        backend.test(raise_error=False)
    return render_to_response('status_backends.html', locals(), context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.static import serve
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Legacy Django (<1.8) URLconf: patterns() with dotted-string view names.
# NOTE(review): MEDIA_ROOT is served by two overlapping routes below
# (r'^media/...' via the string view and r'^media...' via the serve callable);
# confirm whether both are intentional.
urlpatterns = patterns('',
    # Example:
    url(r'^$', 'adagios.views.index', name="home"),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}, name="media"),
    url(r'^403', 'adagios.views.http_403'),
    url(r'^objectbrowser', include('adagios.objectbrowser.urls')),
    url(r'^misc', include('adagios.misc.urls')),
    url(r'^pnp', include('adagios.pnp.urls')),
    url(r'^media(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT }),
    url(r'^rest', include('adagios.rest.urls')),
    url(r'^contrib', include('adagios.contrib.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # (r'^admin/', include(admin.site.urls)),
    # Internationalization
    url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog'),
)
########NEW FILE########
__FILENAME__ = userdata
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Matthieu Caneill <matthieu.caneill@savoirfairelinux.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import json
import collections
import settings
class User(object):
    """ Handles authentified users, provides preferences management.

    Preferences are persisted as one JSON file per user under
    settings.USER_PREFS_PATH and exposed as plain attributes on the
    instance. Attribute names starting with '_' are internal and never
    persisted.
    """
    def __init__(self, request, autosave=False):
        """ Instantiates one user's preferences.
        Args:
          request (Request): The incoming Django request.
        Kwargs:
          autosave (bool): if True, preferences are automatically saved.
        """
        self._request = request
        self._autosave = autosave
        try:
            self._username = request.META.get('REMOTE_USER', 'anonymous')
        except Exception:
            self._username = 'anonymous'
        self._conffile = self._get_prefs_location()
        self._check_path(self._conffile)
        # sets the preferences as attributes:
        # NOTE(review): iteritems() is Python-2-only. Writing to __dict__
        # directly deliberately bypasses __setattr__ (and thus autosave).
        for k, v in self._get_conf().iteritems():
            self.__dict__[k] = v
    def _check_path(self, path):
        """ Checks the userdata folder, try to create it if it doesn't
        exist."""
        folder = os.path.dirname(path)
        # does the folder exist?
        if not os.path.isdir(folder):
            try:
                os.makedirs(folder)
            except:
                # Bare except: any failure (permissions, race) is reported
                # as a single human-readable error.
                raise Exception("Folder %s can't be created. Be sure Adagios "
                                "has write access on its parent." % folder)
    def _get_prefs_location(self):
        """ Returns the location of the preferences file of the
        specified user. """
        try:
            user_prefs_path = settings.USER_PREFS_PATH
        except:
            raise Exception('You must define USER_PREFS_PATH in settings.py')
        # One JSON file per user, keyed by REMOTE_USER.
        return os.path.join(user_prefs_path, self._username + '.json')
    def _get_default_conf(self):
        # Fall back to an empty dict when PREFS_DEFAULT is not configured.
        try:
            d = settings.PREFS_DEFAULT
        except:
            d = dict()
        return d
    def _get_conf(self):
        """ Returns the json preferences for the specified user.

        Missing file (IOError) or corrupt JSON (ValueError) both fall back
        to the configured defaults.
        """
        try:
            with open(self._conffile) as f:
                conf = json.loads(f.read())
        except IOError:
            conf = self._get_default_conf()
        except ValueError:
            conf = self._get_default_conf()
        return conf
    def __getattr__(self, name):
        """ Provides None as a default value.

        Only invoked when normal attribute lookup fails, so in practice
        this returns None for any preference that was never set.
        """
        if name not in self.__dict__.keys():
            return None
        return self.__dict__[name]
    def __setattr__(self, name, value):
        """ Saves the preferences if autosave is set.

        Internal attributes (leading underscore) never trigger a save;
        during __init__ the early assignments are safe because
        _autosave resolves to None via __getattr__ until it is set.
        """
        self.__dict__[name] = value
        if self._autosave and not name.startswith('_'):
            self.save()
    def set_pref(self, name, value):
        """ Explicitly sets a user preference (never triggers autosave). """
        self.__dict__[name] = value
    def to_dict(self):
        # Public (non-underscore) attributes only; this is what gets persisted.
        d = {}
        for k in filter(lambda x: not(x.startswith('_')), self.__dict__.keys()):
            d[k] = self.__dict__[k]
        return d
    def save(self):
        """ Saves the preferences in JSON format. """
        d = self.to_dict()
        try:
            with open(self._conffile, 'w') as f:
                f.write(json.dumps(d))
        except IOError:
            raise Exception("Couldn't write settings into file %s. Be sure to "
                            "have write permissions on the parent folder."
                            % self._conffile)
        self.trigger_hooks()
    def trigger_hooks(self):
        """ Triggers the hooks when preferences are changed. """
        # language preference
        from django.utils import translation
        try:
            self._request.session['django_language'] = self.language
            # newer versions of Django: s/django_language/_language
            translation.activate(self.language)
        except Exception as e:
            # Best effort: missing session or unset language is not fatal.
            pass
########NEW FILE########
__FILENAME__ = utils
#!/usr/bin/env python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import multiprocessing
import adagios.status.utils
import time
import adagios
import pynag.Model
import adagios.exceptions
import adagios.settings
import os
import pynag.Utils.misc
from django.utils.translation import ugettext as _
def wait(object_type, WaitObject, WaitCondition, WaitTrigger, **kwargs):
    """ Block on a livestatus WAIT query until WaitObject meets WaitCondition. """
    # request=None: open a livestatus connection outside of any web request.
    livestatus = adagios.status.utils.livestatus(None)
    livestatus.get(object_type, WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
    print WaitObject
def wait_for_objects(object_type, object_list, condition=None, trigger='check'):
    """ Block until every object in object_list satisfies `condition`.

    When no condition is supplied, waits for each object to have been
    checked at least once (last_check > 0).
    """
    if not condition:
        condition = "last_check > 0"
    for wait_object in object_list:
        wait(object_type, WaitObject=wait_object,
             WaitCondition=condition, WaitTrigger=trigger)
def wait_for_service(host_name, service_description, condition='last_check >= 0', trigger='check'):
    """ Block until the given service satisfies `condition` in livestatus.

    NOTE: `trigger` is accepted for symmetry with wait_for_objects() but
    is not passed on to livestatus here.
    """
    conn = adagios.status.utils.livestatus(None)
    wait_object = "%s;%s" % (host_name, service_description)
    conn.get_services(
        host_name=host_name,
        service_description=service_description,
        WaitCondition=condition,
        WaitObject=wait_object,
    )
from multiprocessing.pool import ThreadPool
class Task(object):
def __init__(self, num_processes=5):
self._tasks = []
adagios.tasks.append(self)
self._pool = ThreadPool(processes=num_processes)
def add(self, function, *args, **kwargs):
print "Adding Task:", locals()
result = self._pool.apply_async(function, args, kwargs)
self._tasks.append(result)
#print result.get()
def status(self):
all_tasks = self._tasks
for i in all_tasks:
print i.ready()
completed_tasks = filter(lambda x: x.ready(), all_tasks)
return "{done}/{total} done.".format(done=len(completed_tasks), total=len(all_tasks))
def get_id(self):
return hash(self)
def ready(self):
""" Returns True if all the Tasks in this class have finished running. """
return max(map(lambda x: x.ready(), self._tasks))
def update_eventhandlers(request):
    """ Iterates through all pynag eventhandler and informs them who might be making a change

    The authenticated user (REMOTE_USER) is recorded as modified_by on every
    pynag eventhandler so configuration changes are attributed correctly.
    """
    remote_user = request.META.get('REMOTE_USER', 'anonymous')
    for i in pynag.Model.eventhandlers:
        i.modified_by = remote_user
    # if okconfig is installed, make sure okconfig is notified of git
    # settings
    try:
        from pynag.Utils import GitRepo
        import okconfig
        okconfig.git = GitRepo(directory=os.path.dirname(
            adagios.settings.nagios_config), auto_init=False, author_name=remote_user)
    except Exception:
        # okconfig is optional; silently skip when it is not installed.
        pass
def get_available_themes():
    """ Return a list of theme names found in the media theme directory.

    A directory counts as a theme when it contains the configured
    THEME_ENTRY_POINT file.
    """
    theme_dir = os.path.join(adagios.settings.MEDIA_ROOT,
                             adagios.settings.THEMES_FOLDER)
    return [os.path.basename(root)
            for root, dirs, files in os.walk(theme_dir)
            if adagios.settings.THEME_ENTRY_POINT in files]
def reload_config_file(adagios_configfile=None):
    """ Reloads adagios.conf and populates updates adagios.settings accordingly.
    Args:
        adagios_configfile: Full path to adagios.conf. If None then use settings.adagios_configfile
    """
    if not adagios_configfile:
        adagios_configfile = adagios.settings.adagios_configfile
    # Using execfile might not be optimal outside strict settings.py usage, but
    # lets do things exactly like settings.py does it.
    # NOTE(review): execfile() is Python-2-only, and locals() below also
    # contains this function's own `adagios_configfile` variable, which
    # therefore leaks into adagios.settings alongside the config values.
    execfile(adagios_configfile)
    config_values = locals()
    adagios.settings.__dict__.update(config_values)
class FakeAdagiosEnvironment(pynag.Utils.misc.FakeNagiosEnvironment):
    """ A FakeNagiosEnvironment that additionally sandboxes adagios.settings,
    for use in unit tests. """
    # Snapshot of adagios.settings.__dict__ taken before it is redirected
    # into the temp environment; used by restore_adagios_global_variables().
    _adagios_settings_copy = None
    def __init__(self, *args, **kwargs):
        super(FakeAdagiosEnvironment, self).__init__(*args, **kwargs)
    def update_adagios_global_variables(self):
        """ Updates common adagios.settings to point to a temp directory.
        If you are are doing unit tests which require specific changes, feel free to update
        adagios.settings manually after calling this method.
        """
        self._adagios_settings_copy = adagios.settings.__dict__.copy()
        adagios.settings.adagios_configfile = self.adagios_config_file
        adagios.settings.USER_PREFS_PATH = self.adagios_config_dir + "/userdata"
        adagios.settings.nagios_config = self.cfg_file
        adagios.settings.livestatus_path = self.livestatus_socket_path
        # Apply whatever the (initially empty) temp adagios.conf defines.
        reload_config_file(self.adagios_config_file)
    def restore_adagios_global_variables(self):
        """ Restores adagios.settings so it looks like before update_adagios_global_variables() was called
        """
        adagios.settings.__dict__.clear()
        adagios.settings.__dict__.update(self._adagios_settings_copy)
    def create_minimal_environment(self):
        """ Behaves like FakeNagiosEnvironment except also creates adagios config directory """
        super(FakeAdagiosEnvironment, self).create_minimal_environment()
        self.adagios_config_dir = os.path.join(self.tempdir, 'adagios')
        self.adagios_config_file = os.path.join(self.adagios_config_dir, 'adagios.conf')
        os.makedirs(self.adagios_config_dir)
        # An empty adagios.conf is enough for reload_config_file() to run.
        with open(self.adagios_config_file, 'w') as f:
            f.write('')
    def terminate(self):
        """ Behaves like FakeNagiosEnvironment except also restores adagios.settings module """
        if self._adagios_settings_copy:
            self.restore_adagios_global_variables()
        super(FakeAdagiosEnvironment, self).terminate()
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.http import HttpResponse
import traceback
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext, loader
from django import template
from django.utils.translation import ugettext as _
import time
import logging
import adagios.settings
import adagios.utils
from adagios.exceptions import AccessDenied
def adagios_decorator(view_func):
    """ This is a python decorator intented for all views in the status module.
    It catches all unhandled exceptions and displays them on a generic web page.
    Kind of what the django exception page does when debug mode is on.
    """
    def wrapper(request, *args, **kwargs):
        try:
            # Attribute any config change to the authenticated user.
            if request.method == 'POST':
                adagios.utils.update_eventhandlers(request)
            # Dead timing code (start_time/duration, never used) removed.
            return view_func(request, *args, **kwargs)
        except Exception as e:  # modern syntax, valid on Python 2.6+
            c = {}
            c['exception'] = str(e)
            c['exception_type'] = str(type(e).__name__)
            c['traceback'] = traceback.format_exc()
            return error_page(request, context=c)
    # Preserve the wrapped view's identity for URL resolution and debugging.
    wrapper.__name__ = view_func.__name__
    wrapper.__module__ = view_func.__module__
    return wrapper
def error_page(request, context=None):
    """ Render a 500 error response, either as HTML or (for JSON clients)
    as a serialized context dict. """
    if context is None:
        context = {}
        context['errors'] = []
        context['errors'].append('Error occured, but no error messages provided, what happened?')
    if request.META.get('CONTENT_TYPE') == 'application/json':
        context.pop('request', None)
        # NOTE(review): str(dict) produces Python repr (single quotes), not
        # valid JSON, despite the application/json content type.
        content = str(context)
        response = HttpResponse(content=content, content_type='application/json')
    else:
        response = render_to_response('status_error.html', context, context_instance=RequestContext(request))
    response.status_code = 500
    return response
def index(request):
    """ This view is our frontpage.

    Sends the visitor to the status frontpage when the status view is
    enabled, otherwise to the objectbrowser.
    """
    if adagios.settings.enable_status_view:
        target = 'adagios.status.views.status_index'
    else:
        target = 'objectbrowser'
    return redirect(target, permanent=True)
def http_403(request, exception=None):
    """ Render a 403 (access denied) response, as HTML or as a serialized
    dict for JSON clients. """
    context = {}
    context['exception'] = exception
    if request.META.get('CONTENT_TYPE') == 'application/json':
        c = {}
        c['exception_type'] = exception.__class__
        # NOTE(review): exception.message is Python-2-only, and this branch
        # raises AttributeError when exception is None; str(c) below is also
        # Python repr rather than valid JSON.
        c['message'] = str(exception.message)
        c['access_required'] = exception.access_required
        response = HttpResponse(content=str(c), content_type='application/json')
    else:
        response = render_to_response('403.html', context, context_instance=RequestContext(request))
    response.status_code = 403
    return response
########NEW FILE########
__FILENAME__ = wsgi
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
# Point Django at the adagios settings module before any framework import.
os.environ['DJANGO_SETTINGS_MODULE'] = 'adagios.settings'
import django.core.handlers.wsgi
# WSGI entry point (pre-Django-1.4 style handler) used by mod_wsgi et al.
application = django.core.handlers.wsgi.WSGIHandler()
########NEW FILE########
__FILENAME__ = static_businessprocess
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <palli@opensource.is>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
static_businessprocesses .. This script loads a business process and staticly writes html view for it
"""
#source_template = "/usr/lib/python2.6/site-packages/adagios/status/templates/business_process_view.html"
# Defaults; --source-template and --destination override the first two.
source_template = "/etc/adagios/pages.d/bi_process.html"
destination_directory = "/var/www/iceland.adagios.org"
# Extra querystring appended to every PNP graph request (image size).
pnp_parameters = "&graph_width=350&graph_height=30"
import os
os.environ['DJANGO_SETTINGS_MODULE'] = "adagios.settings"
import simplejson as json
from django.shortcuts import render
from django import template
from django.test.client import Client
from optparse import OptionParser
import adagios.bi
import django.http
from adagios.pnp.functions import run_pnp
# Start by parsing some arguments
parser = OptionParser(usage="usage: %prog [options]", version="%prog 1.0")
parser.add_option('--all', help="Parse all business processes", dest="all", action="store_true", default=False)
# When set, PNP graphs are downloaded as PNGs and indexed in graphs.json.
parser.add_option('--graphs', help="", dest="graphs", action="store_true", default=False)
parser.add_option('--destination', help="destination to write static html into", dest="destination", default=destination_directory)
parser.add_option('--source-template', help="Source template used to render business processes", dest="source", default=source_template)
parser.add_option('--verbose', help="verbose output", dest="verbose", action="store_true", default=False)
(options, args) = parser.parse_args()
def verbose(message):
    """ Print message only when the script was started with --verbose. """
    if options.verbose:
        print message
def businessprocess_to_html(process_name, process_type='businessprocess'):
    """ Render one business process to <destination>/<name>/index.html.

    With --graphs, additionally downloads each PNP graph of the process as
    a PNG through the Django test client and writes a graphs.json index
    next to the rendered page.
    """
    bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
    verbose("Rendering business process %s" % bp.name)
    c = {}
    c['bp'] = bp
    c['csrf_token'] = ''  # static page, no POSTs, an empty token suffices
    c['graphs_url'] = "graphs.json"
    c['static'] = True
    directory = "%s/%s" % (options.destination, bp.name)
    if not os.path.exists(directory):
        os.makedirs(directory)
    if options.graphs:
        graphs = bi_graphs_to_json(process_name, process_type)
        client = Client()  # hoisted: one test client serves every download
        for i in graphs:
            url = i.get('image_url')
            verbose("Saving image %s" % url)
            image = client.get("/pnp/image?%s&%s" % (url, pnp_parameters)).content
            graph_filename = "%s/%s.png" % (directory, url)
            # PNG data is binary: write in 'wb' and close deterministically
            # (the old code used text mode and never closed the handle).
            with open(graph_filename, 'wb') as image_file:
                image_file.write(image)
        graph_json_file = "%s/graphs.json" % (directory)
        # Rewrite the URLs so the static page points at the saved PNGs.
        for i in graphs:
            i['image_url'] = i['image_url'] + '.png'
        graph_json = json.dumps(graphs, indent=4)
        with open(graph_json_file, 'w') as json_file:
            json_file.write(graph_json)
    with open(options.source, 'r') as source_file:
        content = source_file.read()
    t = template.Template(content)
    rendered = t.render(template.Context(c))
    destination_file = "%s/index.html" % directory
    with open(destination_file, 'w') as out:
        out.write(rendered.encode('utf-8'))
def bi_graphs_to_json(process_name, process_type='businessprocess'):
    """ Collect PNP graph metadata for every 'pnp' graph of a business process.

    Returns a list of PNP json entries, each annotated with the metric's
    last value and the graph's notes.
    """
    bp = adagios.bi.get_business_process(process_name=process_name,
                                         process_type=process_type)
    if not bp.graphs:
        return []
    result = []
    for graph in bp.graphs or []:
        if graph.get('graph_type') != 'pnp':
            continue
        host_name = graph.get('host_name')
        service_description = graph.get('service_description')
        metric_name = graph.get('metric_name')
        pnp_result = run_pnp('json', host=host_name, srv=service_description)
        for entry in json.loads(pnp_result):
            if entry.get('ds_name') == metric_name:
                entry['last_value'] = bp.get_pnp_last_value(
                    host_name, service_description, metric_name)
                entry['notes'] = graph.get('notes')
                result.append(entry)
    return result
# Decide which processes to render: every known process with --all,
# otherwise the names given as positional arguments.
if options.all:
    processlist = adagios.bi.get_all_process_names()
else:
    processlist = args
if not processlist:
    parser.error("Either provide business process name or specify --all")
for i in processlist:
    print "doing ", i
    businessprocess_to_html(i)
########NEW FILE########
| [
"dyangUCI@github.com"
] | dyangUCI@github.com |
0e40086e71cd625c8401d64c50458cd0a2ac5370 | c44a219f2465db28abdbe7b147a13e77109f8573 | /pygameGUI.py | 37c16ad6ef298b8727a25ae81abc50a8080e4029 | [] | no_license | CoreTaxxe/pygameGUI | c9ecbeac8cfc488534d237624d204031857189e1 | 6859a3cc5d52a3534988e919ddc4529f40c11476 | refs/heads/master | 2020-05-15T23:29:39.218472 | 2019-04-21T16:15:25 | 2019-04-21T16:15:25 | 182,555,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | from button import Button
from label import Label
from inputbox import InputBox
from fontobjects import render_text | [
"47256718+CoreTaxxe@users.noreply.github.com"
] | 47256718+CoreTaxxe@users.noreply.github.com |
9b1eca8722ab502f4b29d6c68e3f4f0f937f5538 | 5dc8d8dfa3076e3f2616a7c233ae0298bc551986 | /RPi/Assignment5.py | 8fbc2c0343f24c3b7bda49105db705777b6282a6 | [] | no_license | mdruiz/EECS-113-Hardware-Software-Interface | e1062bea69423e9636ddbace57c4e330c05aa78d | 80b3350302b3d2f21c18025f7783bc25d4ddeddf | refs/heads/master | 2021-01-01T17:40:48.214643 | 2017-07-23T22:51:50 | 2017-07-23T22:51:50 | 98,129,998 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,019 | py | import smtplib
from email.mime.text import MIMEText
import datetime
import socket
import time
import getpass
import RPi.GPIO as GPIO
# Account Information
RECIPIENT = 'xxxxxx@gmail.com' # Email to send to. (CAN BE ANYTHING)
SENDER = 'xxxxxx@gmail.com' # Email to send from. (MUST BE GMAIL)
PASSWORD = ''
def sendSOS():
    """ Send a 'Where are you?' SOS email from SENDER to RECIPIENT via Gmail.

    Retries the SMTP connection once per second; NOTE(review): after 10
    failed attempts it calls exit(), terminating the whole process from
    inside a helper function.
    """
    print("Sending")
    tries = 0
    while True:
        if (tries > 10):
            exit()
        print str(tries) + "\n"
        try:
            # 587 = Gmail's STARTTLS submission port.
            smtpserver = smtplib.SMTP('smtp.gmail.com', 587, timeout=30)
            break
        except Exception as e:
            tries = tries + 1
            time.sleep(1)
    smtpserver.ehlo()  # Says 'hello' to the server
    smtpserver.starttls()  # Start TLS encryption
    smtpserver.ehlo()
    # PASSWORD is the module-level global read via getpass at startup.
    smtpserver.login(SENDER, PASSWORD)  # Log in to server
    today = datetime.date.today()  # Get current time/date
    # Creates the text, subject, 'from', and 'to' of the message.
    msg = MIMEText("Where are you?\n")
    msg['Subject'] = 'SOS %s' % today.strftime('%b %d %Y')
    msg['From'] = SENDER
    msg['To'] = RECIPIENT
    # Sends the message
    smtpserver.sendmail(SENDER, [RECIPIENT], msg.as_string())
    # Closes the smtp server.
    smtpserver.quit()
def driveLEDs(STATE):
    """ Reflect STATE on the two status LEDs: green on when STATE is truthy,
    red on otherwise. """
    green_on, red_on = (True, False) if STATE else (False, True)
    GPIO.output(LED1, green_on)
    GPIO.output(LED2, red_on)
# Initial LED state; toggled to follow the physical switch below.
STATE = True
# Polling interval in seconds and running timer since the last state change.
QUANTUM = 0.25
ELAPSED = 0.0
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)  # BCM pin numbering
LED1 = 18 # Green
LED2 = 23 # Red
SWITCH = 16
GPIO.setup(LED1, GPIO.OUT)
GPIO.setup(LED2, GPIO.OUT)
GPIO.setup(SWITCH, GPIO.IN)
# Prompt once at startup; sendSOS() reads this module-level global.
PASSWORD = getpass.getpass('Your Gmail Password:')
if __name__ == '__main__':
    try:
        print 'Press Ctrl-C to quit.'
        while True:
            driveLEDs( STATE )
            time.sleep(QUANTUM)
            ELAPSED = ELAPSED + QUANTUM
            print("Time: " + str(ELAPSED))
            # Follow the switch; reset the timer on every state change.
            if( STATE != GPIO.input(SWITCH) ):
                STATE = GPIO.input(SWITCH)
                ELAPSED = 0
                print("Switching to: " + str(STATE))
            # No change for over 30 seconds: send the SOS email.
            elif (ELAPSED > 30):
                sendSOS()
                ELAPSED = 0
    finally:
        # Always release the GPIO pins, even on Ctrl-C.
        GPIO.cleanup()
"noreply@github.com"
] | noreply@github.com |
5219f4343b19955932f9428cd6389673b4742d66 | 3a987a3db71e4f601cd7b1810be432e9dc33599b | /day8/多线程/守护线程.py | 7022da15ab13620775fb9dbb38908ae268151e1a | [] | no_license | jiaziming/new-old | da8332bfe33f597e3cb3fbeeeb61c22e47760727 | cd120eb88a9e52fdf7f079bcac6fff28c2cc2207 | refs/heads/master | 2020-04-22T09:40:21.429429 | 2019-04-04T08:18:26 | 2019-04-04T08:18:26 | 170,280,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | #!/usr/bin/python
# -*-coding:utf-8-*-
import time,threading
def run(n):
    """ Worker body: announce start, simulate two seconds of work, report done. """
    print('(%s) -------runing -----\n' %n)
    time.sleep(2)
    print('----done----')
def main():
    """ Launch five worker threads running run() and log each start. """
    for idx in range(5):
        worker = threading.Thread(target=run, args=[idx, ])
        worker.start()
        print('starting thread', worker.getName())
# Run main() itself on a daemon thread: when the interpreter exits after the
# 2-second join timeout below, the daemon (and its workers still sleeping in
# run()) are killed without completing.
m =threading.Thread(target=main,args=[])
m.setDaemon(True)
m.start()
m.join(timeout=2)
print('------main thread done------')
"jiaziming123@sina.cn"
] | jiaziming123@sina.cn |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.