# Init Blynk instance
if blynk_enabled:
    print("Blynk upload is enabled")
    blynk = blynklib.Blynk(read_config.blynk_token,
                           server=read_config.blynk_server.strip(),
                           heartbeat=read_config.blynk_heartbeat)

    @blynk.handle_event("connect")
    def connect_handler():
        global is_connected
        if not is_connected:
            is_connected = True
            print("Connected to cloud server")
            syslog.syslog(syslog.LOG_NOTICE, "Connected to cloud server")

    @blynk.handle_event("disconnect")
    def disconnect_handler():
        global is_connected
        if is_connected:
            is_connected = False
            print("Disconnected from cloud server")
            syslog.syslog(syslog.LOG_NOTICE, "Disconnected from cloud server")
# Init Nightscout instance (if requested)
if nightscout_enabled:
    print("Nightscout upload is enabled")
    nightscout = nightscoutlib.nightscout_uploader(server=read_config.nightscout_server,
                                                   secret=read_config.nightscout_api_secret)
##########################################################
# Initialization
##########################################################

syslog.syslog(syslog.LOG_NOTICE, "Starting DD-Guard daemon, version " + VERSION)

# Init signal handler
signal.signal(signal.SIGINT, on_sigterm)
signal.signal(signal.SIGTERM, on_sigterm)

upload_live_data.active = False

# Perform first upload immediately
# Subsequent uploads will be scheduled according to received data timestamp
t = threading.Thread(target=upload_live_data, args=())
t.start()

##########################################################
# Main loop
##########################################################

while True:
    if blynk_enabled:
        blynk.run()
    else:
        time.sleep(0.1)
# <FILESEP>
import os
import json
import argparse
import time

import numpy as np

import torch
from torch.autograd import Variable
import torch.nn.functional as F
import torch.nn as nn

from sklearn.utils.class_weight import compute_class_weight

from tensorboardX import SummaryWriter
from fastprogress import master_bar, progress_bar

# Suppress warnings
import warnings
warnings.filterwarnings("ignore", category=UserWarning)
from scipy.sparse import SparseEfficiencyWarning
warnings.simplefilter('ignore', SparseEfficiencyWarning)

from config import *
from problems.tsp.tsp_reader import TSPReader
from problems.tsptw.tsptw_reader import TSPTWReader
from models.gcn_model import ResidualGatedGCNModel
from models.sparse_wrapper import wrap_sparse
from models.prep_wrapper import PrepWrapResidualGatedGCNModel

parser = argparse.ArgumentParser(description='gcn_tsp_parser')
parser.add_argument('-c', '--config', type=str, default="configs/default.json")
args = parser.parse_args()
config_path = args.config
config = get_config(config_path)
print("Loaded {}:\n{}".format(config_path, config))

# Select the data reader for the requested problem (TSP or TSP with time windows)
is_tsptw = config.get('problem', 'tsp') == 'tsptw'
DataReader = TSPTWReader if is_tsptw else TSPReader

if torch.cuda.is_available():