# NOTE(review): the four tokens below are dataset-extraction residue (a column
# header: "text", "stringlengths", "1", "93.6k"), not part of the original
# source file. Commented out so they cannot be mistaken for code; safe to delete.
# text
# stringlengths
# 1
# 93.6k
# NOTE(review): this fragment begins mid-function — the enclosing `def` and the
# definitions of `p`, `all_cpus`, `exp`, `plot_override`, and `separate_process`
# are above this view, and the original indentation appears to have been
# stripped by whatever extracted this chunk.
p.cpu_affinity(all_cpus)
# Set up logging: root logger at DEBUG so the individual handlers below decide
# what actually gets emitted.
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
form = logging.Formatter("[%(levelname)s/%(processName)s] %(asctime)s %(message)s")
# Handler for logging to the console. NOTE(review): the stream is sys.stdout,
# not stderr as an earlier comment claimed; only WARN and above reach it.
sh = logging.StreamHandler(stream=sys.stdout)
sh.setLevel(logging.WARN) # set level here
# sh.addFilter(ProcessFilter()) # filter to show only logs from manager
sh.setFormatter(form)
root_logger.addHandler(sh)
# Handler for logging to file: full DEBUG detail, rotated at 512 MiB.
util.move_make_file(constants.LOGFILE)
fh = logging.handlers.RotatingFileHandler(constants.LOGFILE, maxBytes=512*1024*1024)
fh.setLevel(logging.DEBUG)
fh.setFormatter(form)
root_logger.addHandler(fh)
# Make output dir (util.move_make presumably moves any pre-existing dir aside
# before creating a fresh one — TODO confirm against util's definition)
util.move_make(exp['output_dir'])
# Make score dir and seed the learning-curve CSV with its header row
if exp['score_dir'] is not None:
util.move_make(exp['score_dir'])
with open(os.path.join(exp['score_dir'],
'learning_curve.csv'), 'w') as score_file:
score_file.write('Time,Score\n')
# Record start time by touching an empty '.firstpost' marker file
open(os.path.join(exp['output_dir'], exp['basename'] + '.firstpost'), 'wb').close()
# Plotting? A non-None override takes precedence over the experiment's setting
if plot_override is not None:
exp['plot'] = plot_override
# Start manager, configured from the experiment dict plus module constants
mgr = FixedLearnersStackingManager(exp['input_dir'], exp['output_dir'], exp['basename'],
exp['time_budget'],
compute_quantum=exp['compute_quantum'], plot=exp['plot'],
overhead_memory=constants.OVERHEAD,
cgroup_soft_limit=constants.CGROUP_SOFT_LIMIT,
cgroup_hard_limit=constants.CGROUP_HARD_LIMIT,
exp=exp)
if separate_process:
# Create process running the manager's communication loop as a child
p = Process(target=agent.start_communication, kwargs=dict(agent=mgr))
p.name = 'manager'
p.start()
print('\nPress enter to terminate at any time.\n')
# Poll until the manager exits, watching stdin for a manual kill request
while True:
if not p.is_alive():
break
# Wait for one second to see if any keyboard input
i, o, e = select.select([sys.stdin], [], [], 1)
if i:
print('\n\nTerminating')
try:
# Ask the manager to shut down gracefully first...
ps = psutil.Process(pid=p.pid)
ps.send_signal(signal.SIGTERM)
p.join(timeout=5)
if p.is_alive():
# ...then escalate: SIGKILL the whole process tree
print("Didn't respond to SIGTERM")
util.murder_family(pid=p.pid, killall=True, sig=signal.SIGKILL)
except psutil.NoSuchProcess:
pass # already dead
break
else:
# Run the manager in-process (blocking call) instead of a child process
mgr.communicate()
def exp_param_defaults(exp_params):
"""Sets all missing parameters to their default values"""
# NOTE(review): this definition is truncated in this view — the defaults
# dict continues below the visible chunk (as must the merge with
# exp_params and the return). Also note `super_fast_learners` is a
# triple-quoted STRING of learner specs, presumably parsed/evaluated
# elsewhere — TODO confirm how it is consumed.
defaults = dict(subset_algos=False,
error_metric=None,
compute_quantum_fixed=False,
score_dir=None,
slowdown_factor=1,
plot=False,
movie=False,
use_db=False,
strategy='stack-meta',
super_fast_subset=1000,
super_fast_timeout=np.inf,
one_shot_timeout=0.333,
anytime_timeout=1,
use_data_subsets=True,
super_fast_learners='''[
('LR-100-subset', CrossValidationAgent, dict(learner=LogisticRegression,
learner_kwargs=dict(C=100),
agent=OneShotLearnerAgent,
agent_kwargs=dict(),