"""An example of how to set up and start an Accessory.

This is:
1. Create the Accessory object you want.
2. Add it to an AccessoryDriver, which will advertise it on the local network,
   set up a server to answer client queries, etc.
"""
import logging
import signal
import random

from pyhap.accessory import Accessory, Bridge
from pyhap.accessory_driver import AccessoryDriver
import pyhap.loader as loader
from pyhap import camera
from pyhap.const import CATEGORY_SENSOR

logging.basicConfig(level=logging.INFO, format="[%(module)s] %(message)s")

class TemperatureSensor(Accessory):
    """Fake temperature sensor, measuring every 3 seconds."""

    category = CATEGORY_SENSOR

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Add the TemperatureSensor service and keep a handle on its
        # CurrentTemperature characteristic so `run` can update it.
        serv_temp = self.add_preload_service('TemperatureSensor')
        self.char_temp = serv_temp.configure_char('CurrentTemperature')

    # The driver calls `run` every 3 seconds on its event loop.
    @Accessory.run_at_interval(3)
    async def run(self):
        self.char_temp.set_value(random.randint(18, 26))

def get_bridge(driver):
    """Call this method to get a Bridge instead of a standalone accessory."""
    bridge = Bridge(driver, 'Bridge')
    temp_sensor = TemperatureSensor(driver, 'Sensor 2')
    temp_sensor2 = TemperatureSensor(driver, 'Sensor 1')
    bridge.add_accessory(temp_sensor)
    bridge.add_accessory(temp_sensor2)
    return bridge


def get_accessory(driver):
    """Call this method to get a standalone Accessory."""
    return TemperatureSensor(driver, 'MyTempSensor')

# Start the accessory on port 51826
driver = AccessoryDriver(port=51826)

# Change `get_accessory` to `get_bridge` if you want to run a Bridge.
driver.add_accessory(accessory=get_accessory(driver))

# We want SIGTERM (terminate) to be handled by the driver itself,
# so that it can gracefully stop the accessory, server and advertising.
signal.signal(signal.SIGTERM, driver.signal_handler)

# Start it!
driver.start()
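
# Sketch: the same startup sequence, but exposing the sensors behind a Bridge
# (via `get_bridge` above) rather than a standalone accessory, as the comment
# before `add_accessory` suggests:
#
#   driver = AccessoryDriver(port=51826)
#   driver.add_accessory(accessory=get_bridge(driver))
#   signal.signal(signal.SIGTERM, driver.signal_handler)
#   driver.start()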
# <FILESEP>

# Implementation adapted from XNAS: https://github.com/MAC-AutoML/XNAS
"""BigNAS subnet evaluation"""

import torch

import core.config as config
import logger.meter as meter
import logger.logging as logging
from core.builder import setup_env
from core.config import cfg
from datasets.loader import get_normal_dataloader
from logger.meter import TestMeter
from bignas.cnn import _infer_BigNAS_CNN

# Load config and check
config.load_configs()
logger = logging.get_logger(__name__)

def main():
    setup_env()

    # Build the BigNAS subnet to evaluate and the (train, valid) dataloaders.
    net = _infer_BigNAS_CNN()
    [train_loader, valid_loader] = get_normal_dataloader()
    test_meter = TestMeter(len(valid_loader))

    # Validate
    top1_err, top5_err = validate(net, train_loader, valid_loader, test_meter)
    logger.info("top1_err:{} top5_err:{}".format(top1_err, top5_err))

@torch.no_grad()
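# Minimal, hypothetical sketch of the `validate` helper called from `main`
# above (the file is truncated at the decorator). It only shows a plain
# top-1/top-5 error computation over `valid_loader`; the XNAS `TestMeter`
# bookkeeping and any BigNAS-specific BN recalibration using `train_loader`
# are assumptions of the original code and are left out here.
def validate(net, train_loader, valid_loader, test_meter):
    """Sketch: evaluate `net` on `valid_loader` and return (top1_err, top5_err) in percent.

    Assumes `net` is a torch.nn.Module and each batch is (inputs, labels).
    `train_loader` and `test_meter` are accepted only to match the call site.
    """
    net.eval()
    if torch.cuda.is_available():
        net = net.cuda()

    num_samples = 0
    num_top1_correct = 0
    num_top5_correct = 0

    for inputs, labels in valid_loader:
        if torch.cuda.is_available():
            inputs, labels = inputs.cuda(), labels.cuda()
        logits = net(inputs)

        # Top-5 predicted classes per sample; the first column is the top-1 prediction.
        _, top5_pred = logits.topk(5, dim=1)
        correct = top5_pred.eq(labels.view(-1, 1))
        num_top1_correct += correct[:, :1].sum().item()
        num_top5_correct += correct.sum().item()
        num_samples += labels.size(0)

    top1_err = 100.0 * (1.0 - num_top1_correct / num_samples)
    top5_err = 100.0 * (1.0 - num_top5_correct / num_samples)
    return top1_err, top5_err


# Entry point (assumed): run the evaluation when the script is executed directly.
if __name__ == "__main__":
    main()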