desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Similar to `apt-get update`'
def update(self):
try: with open('/var/log/ee/ee.log', 'a') as f: proc = subprocess.Popen('apt-get update', shell=True, stdin=None, stdout=f, stderr=subprocess.PIPE, executable='/bin/bash') proc.wait() (output, error_output) = proc.communicate() if ('NO_PUBKEY' in str(error_...
'Similar to `apt-get upgrade`'
def check_upgrade(self):
try: check_update = subprocess.Popen(['apt-get upgrade -s | grep "^Inst" | wc -l'], stdout=subprocess.PIPE, shell=True).communicate()[0] if (check_update == '0\n'): Log.error(self, 'No package updates available') Log.info(self, 'Following packa...
'Similar to `apt-get upgrade`'
def dist_upgrade(self):
try: with open('/var/log/ee/ee.log', 'a') as f: proc = subprocess.Popen('DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" -y ', shell=True, stdin=None, stdout=f, stderr=f, executable='/bin/bash')...
'Similar to `apt-get autoclean`'
def auto_clean(self):
try: orig_out = sys.stdout sys.stdout = open(self.app.config.get('log.logging', 'file'), encoding='utf-8', mode='a') apt_get.autoclean('-y') sys.stdout = orig_out except ErrorReturnCode as e: Log.debug(self, '{0}'.format(e)) Log.error(self, 'Unable to apt-ge...
'Similar to `apt-get autoremove`'
def auto_remove(self):
try: Log.debug(self, 'Running apt-get autoremove') apt_get.autoremove('-y') except ErrorReturnCode as e: Log.debug(self, '{0}'.format(e)) Log.error(self, 'Unable to apt-get autoremove')
'Checks if package is available in cache and is installed or not returns True if installed otherwise returns False'
def is_installed(self, package_name):
apt_cache = apt.cache.Cache() apt_cache.open() if ((package_name.strip() in apt_cache) and apt_cache[package_name.strip()].is_installed): return True return False
'Similar to `apt-get install --download-only PACKAGE_NAME`'
def download_only(self, package_name, repo_url=None, repo_key=None):
packages = ' '.join(package_name) try: with open('/var/log/ee/ee.log', 'a') as f: if (repo_url is not None): EERepo.add(self, repo_url=repo_url) if (repo_key is not None): EERepo.add_key(self, repo_key) proc = subprocess.Popen('apt-g...
'Function to extract tar.gz file'
def extract(self, file, path):
try: tar = tarfile.open(file) tar.extractall(path=path) tar.close() os.remove(file) return True except tarfile.TarError as e: Log.debug(self, '{0}'.format(e)) Log.error(self, 'Unable to extract file \\{0}'.format(file)) return False
'Makes connection with MySQL server'
def connect(self):
try: if os.path.exists('/etc/mysql/conf.d/my.cnf'): connection = pymysql.connect(read_default_file='/etc/mysql/conf.d/my.cnf') else: connection = pymysql.connect(read_default_file='~/.my.cnf') return connection except ValueError as e: Log.debug(self, str(e...
'Get login details from /etc/mysql/conf.d/my.cnf & Execute MySQL query'
def execute(self, statement, errormsg='', log=True):
connection = EEMysql.connect(self) (log and Log.debug(self, 'Exceuting MySQL Statement : {0}'.format(statement))) try: cursor = connection.cursor() sql = statement cursor.execute(sql) connection.commit() except AttributeError as e: Log.debug(self, str(...
'Initialize'
def __init__(self):
pass
'This function used to add apt repositories and or ppa\'s If repo_url is provided adds repo file to /etc/apt/sources.list.d/ If ppa is provided add apt-repository using add-apt-repository command.'
def add(self, repo_url=None, ppa=None):
if (repo_url is not None): repo_file_path = ('/etc/apt/sources.list.d/' + EEVariables().ee_repo_file) try: if (not os.path.isfile(repo_file_path)): with open(repo_file_path, encoding='utf-8', mode='a') as repofile: repofile.write(repo_url) ...
'This function used to remove ppa\'s If ppa is provided adds repo file to /etc/apt/sources.list.d/ command.'
def remove(self, ppa=None, repo_url=None):
if ppa: EEShellExec.cmd_exec(self, "add-apt-repository -y --remove '{ppa_name}'".format(ppa_name=ppa)) elif repo_url: repo_file_path = ('/etc/apt/sources.list.d/' + EEVariables().ee_repo_file) try: repofile = open(repo_file_path, 'w+') repofile.write(repo...
'This function adds imports repository keys from keyserver. default keyserver is hkp://keys.gnupg.net user can provide other keyserver with keyserver="hkp://xyz"'
def add_key(self, keyids, keyserver=None):
EEShellExec.cmd_exec(self, ('gpg --keyserver {serv}'.format(serv=(keyserver or 'hkp://keys.gnupg.net')) + ' --recv-keys {key}'.format(key=keyids))) EEShellExec.cmd_exec(self, ('gpg -a --export --armor {0}'.format(keyids) + ' | apt-key add - '))
'Arguments: (str) @folder: the folder to watch (callable) @callback: a function which is called every time a new line in a file being watched is found; this is called with "filename" and "lines" arguments. (list) @extensions: only watch files with these extensions (int) @tail_lines: read last N lines from files being w...
def __init__(self, filelist, callback, extensions=['log'], tail_lines=0):
self.files_map = {} self.filelist = filelist self.callback = callback self.extensions = extensions for file in self.filelist: assert os.path.isfile(file) assert callable(callback) self.update_files() for (id, file) in list(iter(self.files_map.items())): file.seek(os.path....
'Start the loop. If async is True make one loop then return.'
def loop(self, interval=0.1, async=False):
while 1: self.update_files() for (fid, file) in list(iter(self.files_map.items())): self.readfile(file) if async: return time.sleep(interval)
'Log when a file is un/watched'
def log(self, line):
print line
'Read last N lines from file fname.'
@staticmethod def tail(fname, window):
try: f = open(fname, encoding='utf-8', mode='r') except IOError as err: if (err.errno == errno.ENOENT): return [] else: raise else: BUFSIZ = 1024 f.seek(0, os.SEEK_END) fsize = f.tell() block = (-1) data = '' exi...
'Initialize'
def __init__():
pass
'Swap addition with EasyEngine'
def add(self):
if (EEVariables.ee_ram < 512): if (EEVariables.ee_swap < 1000): Log.info(self, 'Adding SWAP file, please wait...') EEAptGet.update(self) EEAptGet.install(self, ['dphys-swapfile']) EEShellExec.cmd_exec(self, 'service dphys-swapfile stop') ...
'Run shell command from Python'
def cmd_exec(self, command, errormsg='', log=True):
try: (log and Log.debug(self, 'Running command: {0}'.format(command))) with subprocess.Popen([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) as proc: (cmd_stdout_bytes, cmd_stderr_bytes) = proc.communicate() (cmd_stdout, cmd_stderr) = (cmd_stdout_...
'Open files using sensible editor'
def invoke_editor(self, filepath, errormsg=''):
try: subprocess.call(['sensible-editor', filepath]) except OSError as e: Log.debug(self, '{0}{1}'.format(e.errno, e.strerror)) raise CommandExecutionError
'Run shell command from Python'
def cmd_exec_stdout(self, command, errormsg='', log=True):
try: (log and Log.debug(self, 'Running command: {0}'.format(command))) with subprocess.Popen([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) as proc: (cmd_stdout_bytes, cmd_stderr_bytes) = proc.communicate() (cmd_stdout, cmd_stderr) = (cmd_stdout_...
'start service Similar to `service xyz start`'
def start_service(self, service_name):
try: if (service_name in ['nginx', 'php5-fpm']): service_cmd = '{0} -t && service {0} start'.format(service_name) else: service_cmd = 'service {0} start'.format(service_name) Log.info(self, 'Start : {0:10}'.format(service_name), end='') ...
'Stop service Similar to `service xyz stop`'
def stop_service(self, service_name):
try: Log.info(self, 'Stop : {0:10}'.format(service_name), end='') retcode = subprocess.getstatusoutput('service {0} stop'.format(service_name)) if (retcode[0] == 0): Log.info(self, (((('[' + Log.ENDC) + 'OK') + Log.OKBLUE) + ']')) return True else:...
'Restart service Similar to `service xyz restart`'
def restart_service(self, service_name):
try: if (service_name in ['nginx', 'php5-fpm']): service_cmd = '{0} -t && service {0} restart'.format(service_name) else: service_cmd = 'service {0} restart'.format(service_name) Log.info(self, 'Restart : {0:10}'.format(service_name), end=''...
'Stop service Similar to `service xyz stop`'
def reload_service(self, service_name):
try: if (service_name in ['nginx', 'php5-fpm']): service_cmd = '{0} -t && service {0} reload'.format(service_name) else: service_cmd = 'service {0} reload'.format(service_name) Log.info(self, 'Reload : {0:10}'.format(service_name), end='') ...
'Initializes Directory as repository if not already git repo. and adds uncommited changes automatically'
def add(self, paths, msg='Intializating'):
for path in paths: global git git = git.bake('--git-dir={0}/.git'.format(path), '--work-tree={0}'.format(path)) if os.path.isdir(path): if (not os.path.isdir((path + '/.git'))): try: Log.debug(self, 'EEGit: git init at {0}'.format(p...
'Checks status of file, If its tracked or untracked.'
def checkfilestatus(self, repo, filepath):
global git git = git.bake('--git-dir={0}/.git'.format(repo), '--work-tree={0}'.format(repo)) status = git.status('-s', '{0}'.format(filepath)) if (len(status.splitlines()) > 0): return True else: return False
'Default function of log show'
@expose(hide=True) def default(self):
self.msg = [] if self.app.pargs.php: self.app.pargs.nginx = True if ((not self.app.pargs.nginx) and (not self.app.pargs.fpm) and (not self.app.pargs.mysql) and (not self.app.pargs.access) and (not self.app.pargs.wp) and (not self.app.pargs.site_name)): self.app.pargs.nginx = True sel...
'Default function of log reset'
@expose(hide=True) def default(self):
self.msg = [] if self.app.pargs.php: self.app.pargs.nginx = True if ((not self.app.pargs.nginx) and (not self.app.pargs.fpm) and (not self.app.pargs.mysql) and (not self.app.pargs.access) and (not self.app.pargs.wp) and (not self.app.pargs.site_name) and (not self.app.pargs.slow_log_db)): se...
'Default function of log GZip'
@expose(hide=True) def default(self):
self.msg = [] if self.app.pargs.php: self.app.pargs.nginx = True if ((not self.app.pargs.nginx) and (not self.app.pargs.fpm) and (not self.app.pargs.mysql) and (not self.app.pargs.access) and (not self.app.pargs.wp) and (not self.app.pargs.site_name)): self.app.pargs.nginx = True sel...
'Default function of log Mail'
@expose(hide=True) def default(self):
self.msg = [] if self.app.pargs.php: self.app.pargs.nginx = True if ((not self.app.pargs.nginx) and (not self.app.pargs.fpm) and (not self.app.pargs.mysql) and (not self.app.pargs.access) and (not self.app.pargs.wp) and (not self.app.pargs.site_name)): self.app.pargs.nginx = True sel...
'This function Secures authentication'
@expose(hide=True) def secure_auth(self):
passwd = ''.join([random.choice((string.ascii_letters + string.digits)) for n in range(6)]) if (not self.app.pargs.user_input): username = input('Provide HTTP authentication user name [{0}] :'.format(EEVariables.ee_user)) self.app.pargs.user_input = username if (usernam...
'This function Secures port'
@expose(hide=True) def secure_port(self):
if self.app.pargs.user_input: while (not self.app.pargs.user_input.isdigit()): Log.info(self, 'Please Enter valid port number ') self.app.pargs.user_input = input('EasyEngine admin port [22222]:') if (not self.app.pargs.user_input): port = input('E...
'This function Secures IP'
@expose(hide=True) def secure_ip(self):
newlist = [] if (not self.app.pargs.user_input): ip = input('Enter the comma separated IP addresses to white list [127.0.0.1]:') self.app.pargs.user_input = ip try: user_ip = self.app.pargs.user_input.split(',') except Exception as e: user_ip = ...
'Display Nginx information'
@expose(hide=True) def info_nginx(self):
version = os.popen("nginx -v 2>&1 | cut -d':' -f2 | cut -d' ' -f2 | cut -d'/' -f2 | tr -d '\n'").read() allow = os.popen("grep ^allow /etc/nginx/common/acl.conf | cut -d' ' -f2 | cut -d';' -f1 | tr '\n' ' '"...
'Display PHP information'
@expose(hide=True) def info_php(self):
version = os.popen(("{0} -v 2>/dev/null | head -n1 | cut -d' ' -f2 |".format(('php5.6' if ((EEVariables.ee_platform_codename == 'trusty') or (EEVariables.ee_platform_codename == 'xenial')) else 'php')) + " cut -d'+' -f1 | tr -d '\n'")).read config = configpa...
'Display PHP information'
@expose(hide=True) def info_php7(self):
version = os.popen("php7.0 -v 2>/dev/null | head -n1 | cut -d' ' -f2 | cut -d'+' -f1 | tr -d '\n'").read config = configparser.ConfigParser() config.read('/etc/php/7.0/fpm/php.ini') expose_php = config['PHP']['expose_php'] memory_limit = config['...
'Display MySQL information'
@expose(hide=True) def info_mysql(self):
version = os.popen("mysql -V | awk '{print($5)}' | cut -d ',' -f1 | tr -d '\n'").read() host = 'localhost' port = os.popen('mysql -e "show variables" | grep ^port | awk \'{print($2)}\' | tr -d \'\n\'').read() wait_timeout = os...
'default function for info'
@expose(hide=True) def default(self):
if ((not self.app.pargs.nginx) and (not self.app.pargs.php) and (not self.app.pargs.mysql) and (not self.app.pargs.php7)): self.app.pargs.nginx = True self.app.pargs.php = True self.app.pargs.mysql = True if EEAptGet.is_installed(self, 'php7.0-fpm'): self.app.pargs.php = ...
'Start services'
@expose(help='Start stack services') def start(self):
services = [] if (not (self.app.pargs.nginx or self.app.pargs.php or self.app.pargs.php7 or self.app.pargs.mysql or self.app.pargs.postfix or self.app.pargs.hhvm or self.app.pargs.memcache or self.app.pargs.dovecot or self.app.pargs.redis)): self.app.pargs.nginx = True self.app.pargs.php = True ...
'Stop services'
@expose(help='Stop stack services') def stop(self):
services = [] if (not (self.app.pargs.nginx or self.app.pargs.php or self.app.pargs.php7 or self.app.pargs.mysql or self.app.pargs.postfix or self.app.pargs.hhvm or self.app.pargs.memcache or self.app.pargs.dovecot or self.app.pargs.redis)): self.app.pargs.nginx = True self.app.pargs.php = True ...
'Restart services'
@expose(help='Restart stack services') def restart(self):
services = [] if (not (self.app.pargs.nginx or self.app.pargs.php or self.app.pargs.php7 or self.app.pargs.mysql or self.app.pargs.postfix or self.app.pargs.hhvm or self.app.pargs.memcache or self.app.pargs.dovecot or self.app.pargs.redis)): self.app.pargs.nginx = True self.app.pargs.php = True ...
'Status of services'
@expose(help='Get stack status') def status(self):
services = [] if (not (self.app.pargs.nginx or self.app.pargs.php or self.app.pargs.php7 or self.app.pargs.mysql or self.app.pargs.postfix or self.app.pargs.hhvm or self.app.pargs.memcache or self.app.pargs.dovecot or self.app.pargs.redis)): self.app.pargs.nginx = True self.app.pargs.php = True ...
'Reload service'
@expose(help='Reload stack services') def reload(self):
services = [] if (not (self.app.pargs.nginx or self.app.pargs.php or self.app.pargs.php7 or self.app.pargs.mysql or self.app.pargs.postfix or self.app.pargs.hhvm or self.app.pargs.memcache or self.app.pargs.dovecot or self.app.pargs.redis)): self.app.pargs.nginx = True self.app.pargs.php = True ...
'default action of ee stack command'
@expose(hide=True) def default(self):
if self.app.pargs.pagespeed: Log.error(self, 'Pagespeed support has been dropped since EasyEngine v3.6.0', False) Log.error(self, 'Please run command again without `--pagespeed`', False) Log.error(self, 'For more details, read - https://easy...
'Pre settings to do before installation packages'
@expose(hide=True) def pre_pref(self, apt_packages):
if set(EEVariables.ee_postfix).issubset(set(apt_packages)): Log.debug(self, 'Pre-seeding Postfix') try: EEShellExec.cmd_exec(self, 'echo "postfix postfix/main_mailer_type string \'Internet Site\'" | debconf-set-selections') EEShellExec.cmd_exec(self, '...
'Post activity after installation of packages'
@expose(hide=True) def post_pref(self, apt_packages, packages):
if len(apt_packages): if set(EEVariables.ee_postfix).issubset(set(apt_packages)): EEGit.add(self, ['/etc/postfix'], msg='Adding Postfix into Git') EEService.reload_service(self, 'postfix') if set(EEVariables.ee_nginx).issubset(set(apt_packages)): if (set(...
'Start installation of packages'
@expose(help='Install packages') def install(self, packages=[], apt_packages=[], disp_msg=True):
if self.app.pargs.pagespeed: Log.error(self, 'Pagespeed support has been dropped since EasyEngine v3.6.0', False) Log.error(self, 'Please run command again without `--pagespeed`', False) Log.error(self, 'For more details, read - https://easy...
'Start removal of packages'
@expose(help='Remove packages') def remove(self):
apt_packages = [] packages = [] if self.app.pargs.pagespeed: Log.error(self, 'Pagespeed support has been dropped since EasyEngine v3.6.0', False) Log.error(self, 'Please run command again without `--pagespeed`', False) Log.error(self, 'For more ...
'Start purging of packages'
@expose(help='Purge packages') def purge(self):
apt_packages = [] packages = [] if self.app.pargs.pagespeed: Log.error(self, 'Pagespeed support has been dropped since EasyEngine v3.6.0', False) Log.error(self, 'Please run command again without `--pagespeed`', False) Log.error(self, 'For more ...
'1. reads database information from wp/ee-config.php 2. updates records into ee database accordingly.'
@expose(hide=True) def sync(self):
Log.info(self, 'Synchronizing ee database, please wait...') sites = getAllsites(self) if (not sites): pass for site in sites: if (site.site_type in ['mysql', 'wp', 'wpsubdir', 'wpsubdomain']): ee_site_webroot = site.site_path configfiles = glob.glob((e...
'This function clears Redis cache'
@expose(hide=True) def clean_redis(self):
if EEAptGet.is_installed(self, 'redis-server'): Log.info(self, 'Cleaning Redis cache') EEShellExec.cmd_exec(self, 'redis-cli flushall') else: Log.info(self, 'Redis is not installed')
'This function Clears memcache'
@expose(hide=True) def clean_memcache(self):
try: if EEAptGet.is_installed(self, 'memcached'): EEService.restart_service(self, 'memcached') Log.info(self, 'Cleaning MemCache') else: Log.info(self, 'Memcache not installed') except Exception as e: Log.debug(self, '{0}'.format(e)) L...
'This function clears Fastcgi cache'
@expose(hide=True) def clean_fastcgi(self):
if os.path.isdir('/var/run/nginx-cache'): Log.info(self, 'Cleaning NGINX FastCGI cache') EEShellExec.cmd_exec(self, 'rm -rf /var/run/nginx-cache/*') else: Log.error(self, 'Unable to clean FastCGI cache', False)
'This function clears opcache'
@expose(hide=True) def clean_opcache(self):
try: Log.info(self, 'Cleaning opcache') wp = urllib.request.urlopen(' https://127.0.0.1:22222/cache/opcache/opgui.php?page=reset').read() except Exception as e: Log.debug(self, '{0}'.format(e)) Log.debug(self, 'Unable hit url, https://127.0.0.1:22222/cache/opca...
'Start/Stop Nginx debug'
@expose(hide=True) def debug_nginx(self):
if ((self.app.pargs.nginx == 'on') and (not self.app.pargs.site_name)): try: debug_address = self.app.config.get('stack', 'ip-address').split() except Exception as e: debug_address = ['0.0.0.0/0'] if ((debug_address == ['127.0.0.1']) or (debug_address == [])): ...
'Start/Stop PHP debug'
@expose(hide=True) def debug_php(self):
if ((self.app.pargs.php == 'on') and (not self.app.pargs.site_name)): if (not EEShellExec.cmd_exec(self, 'sed -n "/upstream php{/,/}/p " /etc/nginx/conf.d/upstream.conf | grep 9001')): Log.info(self, 'Enabling PHP debug') nc = NginxConfig() n...
'Start/Stop PHP5-FPM debug'
@expose(hide=True) def debug_fpm(self):
if ((self.app.pargs.fpm == 'on') and (not self.app.pargs.site_name)): if (not EEShellExec.cmd_exec(self, 'grep "log_level = debug" /etc/{0}/fpm/php-fpm.conf'.format(('php/5.6' if ((EEVariables.ee_platform_codename == 'trusty') or (EEVariables.ee_platform_codename == 'xenial')) else 'php5')))): ...
'Start/Stop PHP debug'
@expose(hide=True) def debug_php7(self):
if ((self.app.pargs.php7 == 'on') and (not self.app.pargs.site_name)): if ((EEVariables.ee_platform_codename == 'wheezy') or (EEVariables.ee_platform_codename == 'precise')): Log.error(self, 'PHP 7.0 not supported.') if (not EEShellExec.cmd_exec(self, 'sed -n "/upstream ...
'Start/Stop PHP5-FPM debug'
@expose(hide=True) def debug_fpm7(self):
if ((self.app.pargs.fpm7 == 'on') and (not self.app.pargs.site_name)): if (not EEShellExec.cmd_exec(self, 'grep "log_level = debug" /etc/php/7.0/fpm/php-fpm.conf')): Log.info(self, 'Setting up PHP7.0-FPM log_level = debug') config = configparser.ConfigParse...
'Start/Stop MySQL debug'
@expose(hide=True) def debug_mysql(self):
if ((self.app.pargs.mysql == 'on') and (not self.app.pargs.site_name)): if (not EEShellExec.cmd_exec(self, 'mysql -e "show variables like \'slow_query_log\';" | grep ON')): Log.info(self, 'Setting up MySQL slow log') EEMysql.execute(self, "set g...
'Start/Stop WordPress debug'
@expose(hide=True) def debug_wp(self):
if ((self.app.pargs.wp == 'on') and self.app.pargs.site_name): wp_config = '{0}/{1}/wp-config.php'.format(EEVariables.ee_webroot, self.app.pargs.site_name) webroot = '{0}{1}'.format(EEVariables.ee_webroot, self.app.pargs.site_name) if (not os.path.isfile(wp_config)): wp_config = ...
'Start/Stop Nginx rewrite rules debug'
@expose(hide=True) def debug_rewrite(self):
if ((self.app.pargs.rewrite == 'on') and (not self.app.pargs.site_name)): if (not EEShellExec.cmd_exec(self, 'grep "rewrite_log on;" /etc/nginx/nginx.conf')): Log.info(self, 'Setting up Nginx rewrite logs') EEShellExec.cmd_exec(self, "sed -i '/http {/a ...
'Handle Ctrl+c hevent for -i option of debug'
@expose(hide=True) def signal_handler(self, signal, frame):
self.start = False if self.app.pargs.nginx: self.app.pargs.nginx = 'off' self.debug_nginx() if self.app.pargs.php: self.app.pargs.php = 'off' self.debug_php() if self.app.pargs.php7: self.app.pargs.php7 = 'off' self.debug_php7() if self.app.pargs.fpm: ...
'Default function of debug'
@expose(hide=True) def default(self):
self.interactive = False self.msg = [] self.trigger_nginx = False self.trigger_php = False if ((not self.app.pargs.nginx) and (not self.app.pargs.php) and (not self.app.pargs.php7) and (not self.app.pargs.fpm) and (not self.app.pargs.fpm7) and (not self.app.pargs.mysql) and (not self.app.pargs.wp) a...
'Default function for import slow log'
@expose(hide=True) def import_slow_log(self):
if os.path.isdir('{0}22222/htdocs/db/anemometer'.format(EEVariables.ee_webroot)): if os.path.isfile('/var/log/mysql/mysql-slow.log'): Log.info(self, 'Importing MySQL slow log to Anemometer') host = os.popen(('grep -e "\'host\'" {0}22222/htdocs/'.format(EEVaria...
'Override setup actions (for every test).'
def setUp(self):
super(EETestCase, self).setUp()
'Override teardown actions (for every test).'
def tearDown(self):
super(EETestCase, self).tearDown()
'Method called to prepare the test fixture. This is called by the unittest framework immediately before calling the test method; any exception raised by this method will be considered an error rather than a test failure. The default implementation does nothing.'
def setUp(self):
pass
'Method called immediately after the test method has been called and the result recorded. This is called even if the test method raised an exception, so the implementation in subclasses may need to be particularly careful about checking internal state. Any exception raised by this method will be considered an error rat...
def tearDown(self):
self.resetExtraLogItems()
'Override to force unittest framework to use test method names instead of docstrings in the report.'
def shortDescription(self):
return None
'Print out what test we are running'
def _printTestHeader(self):
print '###############################################################' print ('Running test: %s.%s...' % (self.__class__, self._testMethodName))
'Put the path to our datasets int the NTA_DATA_PATH variable which will be used to set the environment for each of the workers Parameters: env: The current environment dict'
def _setDataPath(self, env):
assert (env is not None) if ('NTA_DATA_PATH' in env): newPath = ('%s%s%s' % (env['NTA_DATA_PATH'], os.pathsep, g_myEnv.testSrcDataDir)) else: newPath = g_myEnv.testSrcDataDir env['NTA_DATA_PATH'] = newPath
'Launch worker processes to execute the given command line Parameters: cmdLine: The command line for each worker numWorkers: number of workers to launch retval: list of workers'
def _launchWorkers(self, cmdLine, numWorkers):
workers = [] for i in range(numWorkers): stdout = tempfile.TemporaryFile() stderr = tempfile.TemporaryFile() p = subprocess.Popen(cmdLine, bufsize=1, env=os.environ, shell=True, stdin=None, stdout=stdout, stderr=stderr) workers.append(p) return workers
'Return the job info for a job Parameters: cjDAO: client jobs database instance workers: list of workers for this job jobID: which job ID retval: job info'
def _getJobInfo(self, cjDAO, workers, jobID):
jobInfo = cjDAO.jobInfo(jobID) runningCount = 0 for worker in workers: retCode = worker.poll() if (retCode is None): runningCount += 1 if (runningCount > 0): status = ClientJobsDAO.STATUS_RUNNING else: status = ClientJobsDAO.STATUS_COMPLETED jobInfo = ...
'This method generates a canned Hypersearch Job Params structure based on some high level options Parameters: predictionCacheMaxRecords: If specified, determine the maximum number of records in the prediction cache. dataPath: When expDirectory is not specified, this is the data file to be used for the operation. I...
def _generateHSJobParams(self, expDirectory=None, hsImp='v2', maxModels=2, predictionCacheMaxRecords=None, dataPath=None, maxRecords=10):
if (expDirectory is not None): descriptionPyPath = os.path.join(expDirectory, 'description.py') permutationsPyPath = os.path.join(expDirectory, 'permutations.py') permutationsPyContents = open(permutationsPyPath, 'r').read() descriptionPyContents = open(descriptionPyPath, 'r').read()...
'This runs permutations on the given experiment using just 1 worker in the current process Parameters: jobParams: filled in job params for a hypersearch loggingLevel: logging level to use in the Hypersearch worker env: if not None, this is a dict of environment variables that should be sent to eac...
def _runPermutationsLocal(self, jobParams, loggingLevel=logging.INFO, env=None, waitForCompletion=True, continueJobId=None, ignoreErrModels=False):
print print '==================================================================' print 'Running Hypersearch job using 1 worker in current process' print '==================================================================' if (env is not None): saveEnvState = copy.deep...
'Given a prepared, filled in jobParams for a hypersearch, this starts the job, waits for it to complete, and returns the results for all models. Parameters: jobParams: filled in job params for a hypersearch loggingLevel: logging level to use in the Hypersearch worker maxNumWorkers: max # of worker processe...
def _runPermutationsCluster(self, jobParams, loggingLevel=logging.INFO, maxNumWorkers=4, env=None, waitForCompletion=True, ignoreErrModels=False, timeoutSec=DEFAULT_JOB_TIMEOUT_SEC):
print print '==================================================================' print 'Running Hypersearch job on cluster' print '==================================================================' if ((env is not None) and (len(env) > 0)): envItems = [] for (key, value)...
'This runs permutations on the given experiment using just 1 worker Parameters: expDirectory: directory containing the description.py and permutations.py hsImp: which implementation of Hypersearch to use maxModels: max # of models to generate maxNumWorkers: max # of workers to use, N/A if onCluster...
def runPermutations(self, expDirectory, hsImp='v2', maxModels=2, maxNumWorkers=4, loggingLevel=logging.INFO, onCluster=False, env=None, waitForCompletion=True, continueJobId=None, dataPath=None, maxRecords=None, timeoutSec=None, ignoreErrModels=False, predictionCacheMaxRecords=None, **kwargs):
if (env is None): env = dict() self._setDataPath(env) jobParams = self._generateHSJobParams(expDirectory=expDirectory, hsImp=hsImp, maxModels=maxModels, maxRecords=maxRecords, dataPath=dataPath, predictionCacheMaxRecords=predictionCacheMaxRecords) jobParams.update(kwargs) if onCluster: ...
'Try running simple permutations'
def testSimpleV2(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') if (env is None): env = dict() env['NTA_TEST_numIterations'] = '99' env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) (jobID, jobInfo, resultInfos, metricR...
'Try running a simple permutations with delta encoder Test which tests the delta encoder. Runs a swarm of the sawtooth dataset With a functioning delta encoder this should give a perfect result DEBUG: disabled temporarily because this test takes too long!!!'
def testDeltaV2(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'delta') if (env is None): env = dict() env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) env['NTA_TEST_exitAfterNModels'] = str(20) (jobID, jobInfo, resultInfos, metr...
'Try running a simple permutations'
def testSimpleV2NoSpeculation(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') if (env is None): env = dict() env['NTA_TEST_numIterations'] = '99' env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) (jobID, jobInfo, resultInfos, metricR...
'Try running a simple permutations using an actual CLA model, not a dummy'
def testHTMPredictionModelV2(self, onCluster=False, env=None, maxModels=2, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'dummyV2') if (env is None): env = dict() (jobID, jobInfo, resultInfos, metricResults, minErrScore) = self.runPermutations(expDir, hsImp='v2', loggingLevel=g_myEnv.options.logLevel, onCluster=onCluster, env=env, maxModels=maxMo...
'Try running a simple permutations using an actual CLA model, not a dummy'
def testCLAMultistepModel(self, onCluster=False, env=None, maxModels=2, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simple_cla_multistep') if (env is None): env = dict() (jobID, jobInfo, resultInfos, metricResults, minErrScore) = self.runPermutations(expDir, hsImp='v2', loggingLevel=g_myEnv.options.logLevel, onCluster=onCluster, env=env, ma...
'Try running a simple permutations using an actual CLA model, not a dummy. This is a legacy CLA multi-step model that doesn\'t declare a separate \'classifierOnly\' encoder for the predicted field.'
def testLegacyCLAMultistepModel(self, onCluster=False, env=None, maxModels=2, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'legacy_cla_multistep') if (env is None): env = dict() (jobID, jobInfo, resultInfos, metricResults, minErrScore) = self.runPermutations(expDir, hsImp='v2', loggingLevel=g_myEnv.options.logLevel, onCluster=onCluster, env=env, ma...
'Try running a simple permutations'
def testFilterV2(self, onCluster=False):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') env = dict() env['NTA_TEST_maxvalFilter'] = '225' env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = '6' (jobID, jobInfo, resultInfos, metricResults, minErrScore) = self.runPermutations(expDir, hsImp='v2',...
'Try running a simple permutations where a worker comes in late, after the some models have already been evaluated'
def testLateWorker(self, onCluster=False):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') env = dict() env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) env['NTA_TEST_exitAfterNModels'] = '100' (jobID, jobInfo, resultInfos, metricResults, minErrScore) =...
'Run a worker on a model for a while, then have it exit before the model finishes. Then, run another worker, which should detect the orphaned model.'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# NTA_TEST_sysExitModelRange forces a sys.exit within the given model range to create the orphan.
def testOrphanedModel(self, onCluster=False, modelRange=(0, 1)):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') env = dict() env['NTA_TEST_numIterations'] = '2' env['NTA_TEST_sysExitModelRange'] = ('%d,%d' % (modelRange[0], modelRange[1])) env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmM...
def testOrphanedModelGen1(self):
    """Exercise orphaned-model detection for a model in generation index 2.

    Delegates to OneNodeTests.testOrphanedModel with a model range chosen
    so the forced worker exit lands on a later-generation model.
    """
    self._printTestHeader()
    delegate = OneNodeTests(self._testMethodName)
    return delegate.testOrphanedModel(modelRange=(10, 11))
'Run with 1 or more models generating errors'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# NTA_TEST_errModelRange makes models in [modelRange) raise errors during the swarm.
def testErredModel(self, onCluster=False, modelRange=(6, 7)):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') env = dict() env['NTA_TEST_errModelRange'] = ('%d,%d' % (modelRange[0], modelRange[1])) env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) (jobID, jobInfo, resultIn...
'Run with 1 or more models generating jobFail exception'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# the declared modelRange parameter is not visibly used in the truncated body — confirm against full file.
def testJobFailModel(self, onCluster=False, modelRange=(6, 7)):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') env = dict() env['NTA_TEST_jobFailErr'] = 'True' maxNumWorkers = 4 (jobID, jobInfo, resultInfos, metricResults, minErrScore) = self.runPermutations(expDir, hsImp='v2', loggingLevel=g_myEnv.options.logLevel, onCluste...
'Run with too many models generating errors'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# same harness as testErredModel but with a wider error range (5, 10) to exceed the error threshold.
def testTooManyErredModels(self, onCluster=False, modelRange=(5, 10)):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2') env = dict() env['NTA_TEST_errModelRange'] = ('%d,%d' % (modelRange[0], modelRange[1])) env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) (jobID, jobInfo, resultIn...
'Test minimum field contribution threshold for a field to be included in further sprints'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# runs the 'field_threshold_temporal' experiment with 99 iterations.
def testFieldThreshold(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'field_threshold_temporal') if (env is None): env = dict() env['NTA_TEST_numIterations'] = '99' env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) env['NTA_CONF_PROP_nu...
'Try running a spatial classification swarm'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# runs the 'spatial_classification' experiment with 99 iterations.
def testSpatialClassification(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'spatial_classification') if (env is None): env = dict() env['NTA_TEST_numIterations'] = '99' env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatableSwarmMaturityWindow) (jobID, jobInfo, result...
'Run a swarm where \'inputPredictedField\' is set in the permutations file. The dummy model for this swarm is designed to give the lowest error when the predicted field is INCLUDED, so make sure we don\'t get this low error'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# NTA_TEST_inputPredictedField='auto' drives the input-predicted-field behavior under test.
def testAlwaysInputPredictedField(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'input_predicted_field') if (env is None): env = dict() env['NTA_TEST_inputPredictedField'] = 'auto' env['NTA_TEST_numIterations'] = '99' env['NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm'] = ('%d' % 2) env[...
'Test minimum field contribution threshold for a field to be included in further sprints when doing a temporal search that does not require the predicted field.'
# NOTE(review): body below is truncated by extraction ("...") — left byte-identical;
# same experiment dir as testAlwaysInputPredictedField ('input_predicted_field'), with the field-threshold focus.
def testFieldThresholdNoPredField(self, onCluster=False, env=None, **kwargs):
self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, 'input_predicted_field') if (env is None): env = dict() env['NTA_TEST_numIterations'] = '99' env['NTA_TEST_inputPredictedField'] = 'auto' env['NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow'] = ('%d' % g_repeatable...
def testSimpleV2(self):
    """Run the simple V2 permutations test, forcing the on-cluster code path.

    Delegates to OneNodeTests.testSimpleV2 with onCluster=True and returns
    whatever that delegate returns.
    """
    self._printTestHeader()
    return OneNodeTests(self._testMethodName).testSimpleV2(onCluster=True)
def testDeltaV2(self):
    """Run the delta V2 permutations test, forcing the on-cluster code path.

    Delegates to OneNodeTests.testDeltaV2 with onCluster=True and returns
    whatever that delegate returns.
    """
    self._printTestHeader()
    return OneNodeTests(self._testMethodName).testDeltaV2(onCluster=True)