code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def load_tf_lib():
    """Load and return the compiled montblanc tensorflow operator library."""
    import pkg_resources
    import tensorflow as tf
    from os.path import join

    # Resolve the packaged shared object shipped inside montblanc/ext.
    so_path = pkg_resources.resource_filename("montblanc", join('ext', 'rime.so'))
    return tf.load_op_library(so_path)
Load the tensorflow library
def save_log_entry(self, log_entry_form, *args, **kwargs):
    """Dispatch a save to update_log_entry or create_log_entry.

    Forms flagged as updates go to the provider's update call;
    everything else results in a create.
    """
    handler = (self.update_log_entry if log_entry_form.is_for_update()
               else self.create_log_entry)
    return handler(log_entry_form, *args, **kwargs)
Pass through to the provider's LogEntryAdminSession.update_log_entry or .create_log_entry, depending on whether the form is an update.
def _disconnect_signals(self, model): for signal, receiver in self._signals.items(): signal.disconnect(sender=model, dispatch_uid=self._dispatch_uid(signal, model))
Disconnect signals for the model.
def print_plugin_args(plugin_path): args = config_utils.get_config_parameters(plugin_path) args_format = "{:20} {:10} {:^15} {:^10} {:25}" title = args_format.format(defs.NAME.upper(), defs.TYPE.upper(), defs.DEFAULT.upper(), defs.REQUIRED.upper(), defs.DESCRIPTION.upper()) ...
Print plugin parameters table.
def output_json(data, code, headers=None): settings = current_app.config.get('RESTFUL_JSON', {}) if current_app.debug: settings.setdefault('indent', 4) settings.setdefault('sort_keys', not PY3) dumped = dumps(data, **settings) + "\n" resp = make_response(dumped, code) resp.headers.ex...
Makes a Flask response with a JSON encoded body
def store_async_marker(async_id, status): logging.debug("Attempting to mark Async %s complete.", async_id) marker = FuriousAsyncMarker.get_by_id(async_id) if marker: logging.debug("Marker already exists for %s.", async_id) return key = FuriousAsyncMarker(id=async_id, status=status).put()...
Persist a marker indicating the Async ran to the datastore.
def load_corpus(path): if not os.path.exists(path): raise ValueError(( "'{}' dataset has not been downloaded, " "use the yellowbrick.download module to fetch datasets" ).format(path)) categories = [ cat for cat in os.listdir(path) if os.path.isdir(os.path....
Loads and wrangles the passed in text corpus by path.
def timeInfo(self): time_info = self._json_struct.get('timeInfo', {}) if not time_info: return None time_info = time_info.copy() if 'timeExtent' in time_info: time_info['timeExtent'] = utils.timetopythonvalue( ti...
Return the time info for this Map Service
def add_intf_router(self, rout_id, tenant_id, subnet_lst): try: for subnet_id in subnet_lst: body = {'subnet_id': subnet_id} intf = self.neutronclient.add_interface_router(rout_id, body=body) ...
Add the interfaces to a router.
def submit_string(self): required = [] optional = [] for file_verifier in self.file_verifiers: if file_verifier.optional: optional.append('[{0}]'.format(file_verifier.filename)) else: required.append(file_verifier.filename) return '...
Return a string specifying the files to submit for this project.
def _serialize(self, uri, node): meta = self._decode_meta(node['meta'], is_published=bool(node['is_published'])) return { 'uri': uri.clone(ext=node['plugin'], version=node['version']), 'content': node['content'], 'meta': meta }
Serialize node result as dict
def feed_data(self, data):
    """Feed a new chunk into the MultiPart parser or its data stream."""
    if not data:
        return  # empty chunks are ignored entirely
    self._bytes.append(data)
    stream = self.parser.stream
    if stream:
        # Streaming mode: notify the consumer callback immediately.
        stream(self)
    else:
        # Buffered mode: accumulate raw bytes for later parsing.
        self.parser.buffer.extend(data)
Feed new data into the MultiPart parser or the data stream
def dKdiag_dX(self, dL_dKdiag, X, target):
    """Accumulate into *target* the gradient of the covariance diagonal w.r.t. X.

    Uses d(f(X)^2)/dX = 2 * f'(X) * f(X), weighted by dL_dKdiag.
    """
    weighted_grad = self.mapping.df_dX(dL_dKdiag[:, None], X)
    target += 2. * weighted_grad * self.mapping.f(X)
Gradient of diagonal of covariance with respect to X.
def _set_combobox(self, attrname, vals, default=0): combobox = getattr(self.w, attrname) for val in vals: combobox.append_text(val) if default > len(vals): default = 0 val = vals[default] combobox.show_text(val) return val
Populate combobox with given list.
def format_output(self, rendered_widgets): ret = [u'<ul class="formfield">'] for i, field in enumerate(self.fields): label = self.format_label(field, i) help_text = self.format_help_text(field, i) ret.append(u'<li>%s %s %s</li>' % ( label, rendered_wid...
This output will yield all widgets grouped in an unordered list
def transformFilter(actor, transformation): tf = vtk.vtkTransformPolyDataFilter() tf.SetTransform(transformation) prop = None if isinstance(actor, vtk.vtkPolyData): tf.SetInputData(actor) else: tf.SetInputData(actor.polydata()) prop = vtk.vtkProperty() prop.DeepCopy(a...
Transform a ``vtkActor`` and return a new object.
def create_entity_type(project_id, display_name, kind): import dialogflow_v2 as dialogflow entity_types_client = dialogflow.EntityTypesClient() parent = entity_types_client.project_agent_path(project_id) entity_type = dialogflow.types.EntityType( display_name=display_name, kind=kind) respons...
Create an entity type with the given display name.
def where(self, relation, filter_fn): assert type(relation).__name__ in {'str','unicode'}, 'where needs the first arg to be a string' assert callable(filter_fn), 'filter_fn needs to be callable' return VList(i for i in self if relation in i._relations() and any(filter_fn(_()) for _ in i[relation...
use this to filter VLists, simply provide a filter function and what relation to apply it to
def makedirs(self, path):
    """Recursively create storage directories for an absolute *path*.

    Each missing intermediate component is created in turn, then the
    DirEntry for *path* is returned via self.find().

    Raises:
        ValueError: if *path* is not absolute.
    """
    if not path.startswith('/'):
        # BUG FIX: the original used `assert`, which is stripped under
        # `python -O`; validate explicitly instead.
        raise ValueError("path must be absolute (start with '/'): %r" % (path,))
    root = ""
    for item in path.strip('/').split('/'):
        root += "/" + item
        if not self.exists(root):
            self.makedir(root)
    return self.find(path)
Recursive storage DirEntry creation function.
def add(self, value): ind = int(self._ind % self.shape) self._pos = self._ind % self.shape self._values[ind] = value if self._ind < self.shape: self._ind += 1 else: self._ind += self._splitValue self._splitPos += self._splitValue self._...
Add a value to the buffer.
def _loadSession(self, filename): try: self.eval(open(filename).read()) except RuntimeError as e: print(e)
Load a recorded session.
def coerce(cls, key, value):
    """Coerce plain lists into MutationList; defer other types to Mutable."""
    if isinstance(value, MutationList):
        return value
    if isinstance(value, list):
        return MutationList(value)
    # Fall back to the base Mutable machinery (which may raise ValueError).
    return Mutable.coerce(key, value)
Convert list to MutationList.
def ladders(session, game_id):
    """Return the list of unique ladder IDs across a game's lobbies."""
    if isinstance(game_id, str):
        # Accept a game name and resolve it to its numeric id.
        game_id = lookup_game_id(game_id)
    unique_ladders = set()
    for lobby in get_lobbies(session, game_id):
        unique_ladders.update(lobby['ladders'])
    return list(unique_ladders)
Get a list of ladder IDs.
def element_id_by_label(browser, label):
    """Return the ``for`` attribute of the <label> containing *label* text.

    Returns False when no matching label element exists.
    """
    # NOTE: relies on the Python 2 ``unicode`` builtin, like the rest of the file.
    xpath = unicode('//label[contains(., "%s")]' % label)
    matched = XPathSelector(browser, xpath)
    if not matched:
        return False
    return matched.get_attribute('for')
Return the id of a label's for attribute
def clean(self):
    """Reset the build area to an empty directory."""
    buildroot = self.buildroot
    if os.path.exists(buildroot):
        log.info('Clearing the build area.')
        log.debug('Deleting: %s', buildroot)
        shutil.rmtree(buildroot)
    # Always leave a fresh, empty build directory behind.
    os.makedirs(buildroot)
Clear the contents of the build area.
def cublasCtrmv(handle, uplo, trans, diag, n, A, lda, x, incx): status = _libcublas.cublasCtrmv_v2(handle, _CUBLAS_FILL_MODE[uplo], _CUBLAS_OP[trans], _CUBLAS_DIAG[diag], ...
Matrix-vector product for complex triangular matrix.
def covars_(self):
    """Return the covariance parameters expanded to full-matrix form.

    Delegates to ``fill_covars`` using this model's covariance_type,
    n_components and n_features.  Presumably exposed as a @property on
    the enclosing class (decorator not visible here) — confirm.
    """
    return fill_covars(self._covars_, self.covariance_type, self.n_components, self.n_features)
Return covars as a full matrix.
def _subclassed(base, *classes): return all(map(lambda obj: isinstance(obj, base), classes))
Check if all classes are subclassed from base.
def flush(self, meta=None):
    """Delete every key for *meta*'s model — or the whole namespace — from the backend."""
    if meta:
        prefix = self.basekey(meta)
    else:
        prefix = self.namespace
    return self.client.delpattern('%s*' % prefix)
Flush all model keys from the database
def _rle_decode(data): if not data: return data new = b'' last = b'' for cur in data: if last == b'\0': new += last * cur last = b'' else: new += last last = bytes([cur]) return new + last
Decodes run-length-encoded `data`.
def contains_pts(self, pts):
    """Array containment test: True where a point is inside the second
    object but not inside the first (a set-difference region)."""
    first, second = self.objects
    inside_second = second.contains_pts(pts)
    outside_first = np.logical_not(first.contains_pts(pts))
    return np.logical_and(inside_second, outside_first)
Containment test on arrays.
def match(self, device):
    """Return True when *device* satisfies every key/value pair of this filter."""
    for attr, expected in self._match.items():
        if not match_value(getattr(device, attr), expected):
            return False
    return True
Check if the device object matches this filter.
def generate_proxy( prefix, base_url='', verify_ssl=True, middleware=None, append_middleware=None, cert=None, timeout=None): middleware = list(middleware or HttpProxy.proxy_middleware) middleware += list(append_middleware or []) return type('ProxyClass', (HttpProxy,), { 'base_url': b...
Generate a ProxyClass based view that uses the passed base_url.
def _new_mock_response(self, response, file_path): mock_response = copy.copy(response) mock_response.body = Body(open(file_path, 'rb')) mock_response.fields = NameValueRecord() for name, value in response.fields.get_all(): mock_response.fields.add(name, value) mock_re...
Return a new mock Response with the content.
def git_tag(repo_dir, tagname, message=None, force=True):
    """Create (and by default force-replace) an annotated git tag at HEAD."""
    annotation = message or "%s" % tagname  # default the message to the tag name
    command = ['git', 'tag', '--annotate', '--message', annotation]
    if force:
        command.append('--force')
    command.append(tagname)
    return execute_git_command(command, repo_dir=repo_dir)
Create an annotated tag at the current head.
def _convert_date_time_string(dt_string): dt_string = dt_string.split('.')[0] dt_obj = datetime.strptime(dt_string, '%Y%m%d%H%M%S') return dt_obj.strftime('%Y-%m-%d %H:%M:%S')
convert string to date time object
def bitop_not(self, dest, key):
    """Store the bitwise NOT of *key*'s string value into *dest* (Redis BITOP NOT)."""
    command = (b'BITOP', b'NOT', dest, key)
    return self.execute(*command)
Perform bitwise NOT operations between strings.
def parse_row(self, row, row_index, cell_mode=CellMode.cooked):
    """Parse every cell of *row*, passing each its (col, row) coordinates and cell_mode."""
    return [self.parse_cell(cell, (col, row_index), cell_mode)
            for col, cell in enumerate(row)]
Parse a row according to the given cell_mode.
def connect(self, host, port): self._connected = False self._host = "%s:%d" % (host, port) self._closed = False self._close_info = { 'reply_code': 0, 'reply_text': 'failed to connect to %s' % (self._host), 'class_id': 0, 'method_id': 0 ...
Connect to a host and port.
def hold_policy(document, policy, server=False):
    """Temporarily swap *document*'s hold policy, restoring it on exit.

    On the server side, when there was no prior hold, the document is
    unheld instead of having the old (empty) policy written back.
    """
    previous = document._hold
    document._hold = policy
    try:
        yield
    finally:
        if server and not previous:
            document.unhold()
        else:
            document._hold = previous
Context manager to temporary override the hold policy.
def _version_less_than_or_equal_to(self, v1, v2):
    """Return True if version string v1 <= v2.

    Ordering comes from distutils' LooseVersion.
    NOTE(review): distutils is deprecated and removed in Python 3.12;
    consider migrating to ``packaging.version`` — confirm availability.
    """
    from distutils.version import LooseVersion
    return LooseVersion(v1) <= LooseVersion(v2)
Returns true if v1 <= v2.
def backtrack(self, source): key = self.get_tok(source) s = self[key]() meta = s.metadata['original_source'] cls = meta['cls'] args = meta['args'] kwargs = meta['kwargs'] cls = import_name(cls) sout = cls(*args, **kwargs) sout.metadata = s.metadata...
Given a unique key in the store, recreate original source
def on_rule(self, *args):
    """Re-subscribe the listener whenever the rule changes; no-op when unset."""
    rule = self.rule
    if rule is None:
        return
    rule.connect(self._listen_to_rule)
Make sure to update when the rule changes
def standard_block(self, _bytes):
    """Emit a standard block: marker, pause, length, payload and XOR checksum."""
    self.out(self.BLOCK_STANDARD)
    self.out(self.LH(1000))             # fixed value used by standard blocks
    self.out(self.LH(len(_bytes) + 1))  # +1 accounts for the checksum byte
    checksum = 0
    for byte in _bytes:
        checksum ^= (int(byte) & 0xFF)
        self.out(byte)
    self.out(checksum)
Adds a standard block of bytes
def _run_lint_on_file_stamped(*args):
    """Run the lint pass through jobstamp so unchanged files can be skipped."""
    stamp_args, stamp_kwargs = _run_lint_on_file_stamped_args(*args)
    return jobstamp.run(_run_lint_on_file_exceptions, *stamp_args, **stamp_kwargs)
Run linter functions on file_path, stamping in stamp_file_path.
def dim_range_key(eldim):
    """Return the name used to look up a dimension's range."""
    if not isinstance(eldim, dim):
        return eldim.name
    # dim expressions repr as "'name'"; strip the surrounding quotes.
    name = repr(eldim)
    if name.startswith("'") and name.endswith("'"):
        name = name[1:-1]
    return name
Returns the key to look up a dimension range.
def _summarize_o_mutation_type(model):
    """Build the mutation IO summary corresponding to *model*'s object type."""
    from nautilus.api.util import summarize_mutation_io

    # The summary is keyed by the model's string name and is never required.
    return summarize_mutation_io(name=get_model_string(model),
                                 type=_summarize_object_type(model),
                                 required=False)
This function create the actual mutation io summary corresponding to the model
def getidfkeyswithnodes(): idf = IDF(StringIO("")) keys = idfobjectkeys(idf) keysfieldnames = ((key, idf.newidfobject(key.upper()).fieldnames) for key in keys) keysnodefdnames = ((key, (name for name in fdnames if (name.endswith('Node_Name')))) ...
return a list of keys of idfobjects that have 'Node_Name' fields
def _viewset_results(self): results = [] try: response = self._viewset_method( self._viewset.request, *self._request.args, **self._request.kwargs ) if response.status_code == 200: results = response.data if not isinstanc...
Parse results from the viewset response.
def VSInstallDir(self):
    """Return the Visual Studio install directory (registry lookup, with a
    'Program Files (x86)' default when the registry has no entry)."""
    version_key = '%0.1f' % self.vc_ver
    default = os.path.join(self.ProgramFilesx86,
                           'Microsoft Visual Studio %s' % version_key)
    return self.ri.lookup(self.ri.vs, version_key) or default
Microsoft Visual Studio directory.
def submit_form_id(step, id):
    """Locate the form with the given DOM id and submit it."""
    xpath = str('id("{id}")'.format(id=id))
    world.browser.find_element_by_xpath(xpath).submit()
Submit the form having given id.
def count_empty(self, field): try: df2 = self.df[[field]] vals = where(df2.applymap(lambda x: x == '')) num = len(vals[0]) except Exception as e: self.err(e, "Can not count empty values") return self.ok("Found", num, "empty rows in colu...
Count the empty values in a column
def display_list(prefix, l, color):
    # Print each entry's path, prefixed and coloured, one per line.
    # NOTE: Python 2 print statement; `colored` is presumably
    # termcolor.colored imported elsewhere in the file — confirm.
    for itm in l:
        print colored(prefix + itm['path'], color)
Prints a file list to terminal, allows colouring output.
def impact_parameter(a, R, inc, ecc=0, w=0, return_occ=False):
    """Impact parameter of a transit (and optionally the occultation).

    a in AU, R in Rsun, inc & w in radians.  Returns b_tra, or the
    tuple (b_tra, b_occ) when return_occ is True.
    """
    # Common geometric factor shared by both configurations.
    geometry = a * AU * np.cos(inc) / (R * RSUN) * (1 - ecc**2)
    b_tra = geometry / (1 + ecc * np.sin(w))
    if return_occ:
        # BUG FIX: the original overwrote b_tra with the occultation value
        # and then returned the undefined name ``b_occ`` (NameError).
        b_occ = geometry / (1 - ecc * np.sin(w))
        return b_tra, b_occ
    return b_tra
a in AU, R in Rsun, inc & w in radians
def route(obj, rule, *args, **kwargs): def decorator(cls): endpoint = kwargs.get('endpoint', camel_to_snake(cls.__name__)) kwargs['view_func'] = cls.as_view(endpoint) obj.add_url_rule(rule, *args, **kwargs) return cls return decorator
Decorator for the View classes.
def _assemble_regulate_amount(stmt): obj_str = _assemble_agent_str(stmt.obj) if stmt.subj is not None: subj_str = _assemble_agent_str(stmt.subj) if isinstance(stmt, ist.IncreaseAmount): rel_str = ' increases the amount of ' elif isinstance(stmt, ist.DecreaseAmount): ...
Assemble RegulateAmount statements into text.
def getPitchForIntervals(data, tgFN, tierName):
    """Group pitch values by the intervals of a textgrid tier (prep for f0Morph)."""
    textgrid = tgio.openTextgrid(tgFN)
    grouped = textgrid.tierDict[tierName].getValuesInIntervals(data)
    # Keep only the per-interval value lists, dropping the interval objects.
    return [values for _interval, values in grouped]
Preps data for use in f0Morph
def createElement(self, token):
    """Build (but do not attach) an element for *token*, applying its attributes."""
    namespace = token.get("namespace", self.defaultNamespace)
    element = self.elementClass(token["name"], namespace)
    element.attributes = token["data"]
    return element
Create an element but don't insert it anywhere
def check_threat_timeout(self): for id in self.threat_vehicles.keys(): if self.threat_vehicles[id].update_time == 0: self.threat_vehicles[id].update_time = self.get_time() dt = self.get_time() - self.threat_vehicles[id].update_time if dt > self.ADSB_settings.t...
check and handle threat time out
def fill_view(self, view): other = view.hist _other_x_center = other.axis(0).GetBinCenter _other_y_center = other.axis(1).GetBinCenter _other_z_center = other.axis(2).GetBinCenter _other_get = other.GetBinContent _other_get_bin = super(_HistBase, other).GetBin oth...
Fill this histogram from a view of another histogram
def curve(self):
    """Return the HelicalCurve of the super helix.

    Built from this object's major pitch, radius and handedness.
    Presumably exposed as a @property on the enclosing class
    (decorator not visible here) — confirm.
    """
    return HelicalCurve.pitch_and_radius(
        self.major_pitch, self.major_radius,
        handedness=self.major_handedness)
Curve of the super helix.
def from_iterable(cls, iterable: Iterable) -> 'List': iterator = iter(iterable) def recurse() -> List: try: value = next(iterator) except StopIteration: return List.empty() return List.unit(value).append(recurse()) return List.e...
Create list from iterable.
def returner(ret): try: with _get_serv(ret, commit=True) as cur: sql = cur.execute(sql, (ret['fun'], ret['jid'], psycopg2.extras.Json(ret['return']), ret['id'], ret.get('success', False), ...
Return data to a Pg server
def icanhazascii(client, channel, nick, message, found):
    """Rate-limited passthrough: return *found* unless the channel was used too recently."""
    global FLOOD_RATE, LAST_USED
    now = time.time()
    last = LAST_USED.get(channel)
    if last is not None and (now - last) < FLOOD_RATE:
        return  # flood protection: silently drop the match
    LAST_USED[channel] = now
    return found
A plugin for generating and showing ascii art
def _import(func): func_name = func.__name__ if func_name in globals(): return func_name module_name = func.__module__ submodules = module_name.split('.') if submodules[0] in globals(): return module_name + '.' + func_name for i in range(len(submod...
Return the namespace path to the function
def remove_for_target(self, target, classpath_elements):
    """Remove the given classpath entries registered for *target*."""
    wrapped = self._wrap_path_elements(classpath_elements)
    self._classpaths.remove_for_target(target, wrapped)
Removes the given entries for the target.
def create_proteinquant_lookup(fns, pqdb, poolnames, protacc_colnr, ms1_qcolpattern=None, isobqcolpattern=None, psmnrpattern=None, probcolpattern=None, fdrcolpattern=None, pepcolpattern=None): patterns = [ms1_qcolpattern, p...
Calls lower level function to create a protein quant lookup
def run(samples, run_parallel): to_process = [] extras = [] for data in (xs[0] for xs in samples): hlacaller = tz.get_in(["config", "algorithm", "hlacaller"], data) if hlacaller: to_process.append(data) else: extras.append([data]) processed = run_parallel(...
Run HLA detection on the input samples.
def validate_name(err, value, source): 'Tests a manifest name value for trademarks.' ff_pattern = re.compile('(mozilla|firefox)', re.I) err.metadata['name'] = value if ff_pattern.search(value): err.warning( ('metadata_helpers', '_test_name', 'trademark'), 'Add-on has pote...
Tests a manifest name value for trademarks.
def _guess_type(self, full_path): magic = self._match_magic(full_path) if magic is not None: return (mimetypes.guess_type(magic.old_path(full_path))[0] or 'text/plain') else: return mimetypes.guess_type(full_path)[0] or 'text/plain'
Guess the mime type magically or using the mimetypes module.
def store_file(self, folder, name):
    """Write the request body (content-length bytes) to folder/name; return the path."""
    path = os.path.join(folder, name)
    length = int(self.headers['content-length'])
    with open(path, 'wb') as sample:
        sample.write(self.rfile.read(length))
    return path
Stores the uploaded file in the given path.
def wait_for_stats(self):
    """Block until every outstanding statistics job has completed."""
    logging.debug("waiting for statistics to finish")
    for job in self.stat_jobs:
        job.get()  # blocks until this job's result is available
    # Extra grace period after all jobs report done; presumably lets
    # asynchronous side effects settle — confirm whether still needed.
    sleep(2)
Make sure all jobs are finished.
def define_log_processors():
    """Return the processor chain structlog executes before final rendering.

    Order matters: timestamps and default keys come first, then
    positional-argument formatting, stack info and exception rendering.
    """
    return [
        # ISO-8601 timestamps on every event.
        structlog.processors.TimeStamper(fmt="iso"),
        _structlog_default_keys_processor,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]
log processors that structlog executes before final rendering
def find_associated_with_address(self, instance):
    """Return all Project and Organization rows whose address is *instance*."""
    projects = list(Project.objects.filter(address=instance))
    organizations = list(Organization.objects.filter(address=instance))
    return projects + organizations
Returns list with projects and organizations associated with given address
def page_title(step, title):
    """Assert the browser's current page title equals *title*.

    Wrapped in AssertContextManager so a mismatch is reported against
    the originating *step*.
    """
    with AssertContextManager(step):
        assert_equals(world.browser.title, title)
Check that the page title matches the given one.
def run_validators(self, values):
    """Validate each comma-separated float independently."""
    for item in values:
        # Delegate per-item validation to the parent field class.
        super(CommaSepFloatField, self).run_validators(item)
Run validators for each item separately.
def _build_metric_list_to_collect(self, additional_metrics): metrics_to_collect = {} for default_metrics in itervalues(self.DEFAULT_METRICS): metrics_to_collect.update(default_metrics) for option in additional_metrics: additional_metrics = self.AVAILABLE_METRICS.get(optio...
Build the metric list to collect based on the instance preferences.
def list_images(path=['.']): for image_dir in set(path): if not os.path.isdir(image_dir): continue for filename in os.listdir(image_dir): bname, ext = os.path.splitext(filename) if ext.lower() not in VALID_IMAGE_EXTS: continue filepath ...
Return list of image files
def query_paths(self):
    """Return all nested column query paths for this alias, erroring when unknown."""
    paths = self.namespace.alias_to_query_paths.get(self.name)
    if paths:
        return paths
    Log.error("Can not find index {{index|quote}}", index=self.name)
RETURN A LIST OF ALL NESTED COLUMNS
def UpdateProtoResources(self, status): user_cpu = status.cpu_time_used.user_cpu_time system_cpu = status.cpu_time_used.system_cpu_time self.context.client_resources.cpu_usage.user_cpu_time += user_cpu self.context.client_resources.cpu_usage.system_cpu_time += system_cpu user_cpu_total = self.contex...
Save cpu and network stats, check limits.
def robo_avatar_url(user_data, size=80):
    """Build a robohash.org avatar URL for *user_data* at size x size pixels."""
    # Normalize (strip + lowercase) before hashing so equivalent inputs
    # map to the same avatar.
    digest = md5(str(user_data).strip().lower().encode('utf-8')).hexdigest()
    return "https://robohash.org/{hash}.png?size={size}x{size}".format(
        hash=digest, size=size)
Return the robohash.org avatar image URL for the given user data.
def raw_message(self, message, silent=False):
    """Echo *message* in the Vim status line, escaping embedded double quotes."""
    vim = self._vim
    cmd = 'echo "{}"'.format(message.replace('"', '\\"'))
    if silent:
        cmd = 'silent ' + cmd
    if self.isneovim:
        # Neovim requires commands to be dispatched on the async channel.
        vim.async_call(vim.command, cmd)
    else:
        vim.command(cmd)
Display a message in the Vim status line.
def list2dict(list_of_options):
    """Build a dict from (key, value) 2-tuples; later duplicates win."""
    return dict(list_of_options)
Transforms a list of 2 element tuples to a dictionary
def run(conf, only):
    """Run uWSGI using the default or another uwsgiconf configuration module."""
    with errorprint():
        config = ConfModule(conf)
        for alias, pid in config.spawn_uwsgi(only):
            click.secho(
                "Spawned uWSGI for configuration aliased '%s'. PID %s" % (alias, pid),
                fg='green')
Runs uWSGI passing to it using the default or another `uwsgiconf` configuration module.
def _filter_options(self, aliases=True, comments=True, historical=True): options = [] if not aliases: options.append('noaliases') if not comments: options.append('nocomments') if not historical: options.append('nohistorical') return options
Converts a set of boolean-valued options into the relevant HTTP values.
def printAggregateJobStats(self, properties, childNumber): for job in self.jobsToReport: lf = lambda x: "%s:%s" % (x, str(x in properties)) print("\t".join(("JOB:%s" % job, "LOG_FILE:%s" % job.logJobStoreFileID, "TRYS_REMAINING:%i...
Prints a job's ID, log file, remaining tries, and other properties.
def set(self, key, val):
    """Store *val* under *key* in the current task's data.

    Raises:
        RuntimeError: when no task is currently running.
    """
    data = self.get_data(True)
    if data is None:
        raise RuntimeError("No task is currently running")
    data[key] = val
Set value stored for current running task.
async def send_http_response(writer, http_code: int, headers: List[Tuple[str, str]], content: bytes, http_status: str= None ) -> None: if not http_status: http_sta...
generate http response payload and send to writer
def calculate_y_ticks(self, plot_height): calibrated_data_min = self.calibrated_data_min calibrated_data_max = self.calibrated_data_max calibrated_data_range = calibrated_data_max - calibrated_data_min ticker = self.y_ticker y_ticks = list() for tick_value, tick_label in ...
Calculate the y-axis items dependent on the plot height.
def performAction(self, action): gs = [g for g in self.case.online_generators if g.bus.type !=REFERENCE] assert len(action) == len(gs) logger.info("Action: %s" % list(action)) for i, g in enumerate(gs): g.p = action[i] NewtonPF(self.case, verbose=False).solve() ...
Perform an action on the world that changes it's internal state.
def add_jardiff_optgroup(parser): og = parser.add_argument_group("JAR Checking Options") og.add_argument("--ignore-jar-entry", action="append", default=[]) og.add_argument("--ignore-jar-signature", action="store_true", default=False, help="Ignore JAR signing changes")...
option group specific to the tests in jardiff
def _get_pool(name=None, session=None): if session is None: session = _get_session() pools = session.xenapi.pool.get_all() for pool in pools: pool_record = session.xenapi.pool.get_record(pool) if name in pool_record.get('name_label'): return pool return None
Get XEN resource pool object reference
def localize_field(self, value):
    """Transform *value* into its localized string, substituting the field
    default for None/empty values; falsy results become ''."""
    default = self.default
    if default is not None and (value is None or value == ''):
        value = default
    return value or ''
Method that must transform the value from object to localized string
def add_warning(self, s, tag=None):
    """Record warning string *s* under *tag*, skipping duplicates and ignored tags."""
    item = (tag, s)
    ignored_tags = self.aggregate.config["ignorewarnings"]
    if item not in self.warnings and tag not in ignored_tags:
        self.warnings.append(item)
Add a warning string.
def combine_reports(original, new): if original is None: return new report = {} report['name'] = original['name'] report['source_digest'] = original['source_digest'] coverage = [] for original_num, new_num in zip(original['coverage'], new['coverage']): if original_num is None: ...
Combines two gcov reports for a file into one by adding the number of hits on each line
def add_to_collection(self, request, pk=None): entity = self.get_object() if 'ids' not in request.data: return Response({"error": "`ids` parameter is required"}, status=status.HTTP_400_BAD_REQUEST) for collection_id in request.data['ids']: self._get_collection_for_user(co...
Add Entity to a collection.
def create_superuser(self, email, password, **extra_fields): extra_fields.setdefault('is_staff', True) extra_fields.setdefault('is_superuser', True) if extra_fields.get('is_staff') is not True: raise ValueError('Superuser must have is_staff=True.') if extra_fields.get('is_sup...
Save new User with is_staff and is_superuser set to True
def use_any_status_composition_view(self):
    """Switch every provider session that supports it to the any-status composition view."""
    self._operable_views['composition'] = ANY_STATUS
    for session in self._get_provider_sessions():
        try:
            session.use_any_status_composition_view()
        except AttributeError:
            # Not every provider session implements this view; skip those.
            pass
Pass through to provider CompositionLookupSession.use_any_status_composition_view
def fill_fw_dict_from_db(self, fw_data): rule_dict = fw_data.get('rules').get('rules') fw_dict = {'fw_id': fw_data.get('fw_id'), 'fw_name': fw_data.get('name'), 'firewall_policy_id': fw_data.get('firewall_policy_id'), 'fw_type': fw_data.get('fw_ty...
This routine is called to create a local fw_dict with data from DB.
def day_start(self):
    """Start of the hamster day, as a datetime.time built from day_start_minutes."""
    total_minutes = self.get("day_start_minutes")
    hours, minutes = divmod(total_minutes, 60)
    return dt.time(hours, minutes)
Start of the hamster day.