hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
48bc4c72c304a6d7aeeb0dab781f82a2616fe4d3 | 4,766 | py | Python | test/test_memory_leaks.py | elventear/psutil | c159f3352dc5f699143960840e4f6535174690ed | [
"BSD-3-Clause"
] | 4 | 2015-01-06T01:39:12.000Z | 2019-12-09T10:27:44.000Z | test/test_memory_leaks.py | elventear/psutil | c159f3352dc5f699143960840e4f6535174690ed | [
"BSD-3-Clause"
] | null | null | null | test/test_memory_leaks.py | elventear/psutil | c159f3352dc5f699143960840e4f6535174690ed | [
"BSD-3-Clause"
] | 2 | 2016-10-21T03:15:34.000Z | 2018-12-10T03:40:50.000Z | #!/usr/bin/env python
#
# $Id$
#
"""
Note: this is targeted for python 2.x.
To run it under python 3.x you need to use 2to3 tool first:
$ 2to3 -w test/test_memory_leaks.py
"""
import os
import gc
import sys
import unittest
import psutil
from test_psutil import reap_children, skipUnless, skipIf, \
POSIX, LINUX, WINDOWS, OSX, BSD
LOOPS = 1000
TOLERANCE = 4096
def test_main():
    """Assemble the memory-leak test suite and run it verbosely."""
    suite = unittest.TestSuite()
    for case in (TestProcessObjectLeaks, TestModuleFunctionsLeaks):
        suite.addTest(unittest.makeSuite(case))
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite)
if __name__ == '__main__':
test_main()
| 24.822917 | 82 | 0.599245 |
48bc6e9f0498c16dbcd64706a2f744500361365e | 8,516 | py | Python | ga4gh/search/compliance/util/local_server.py | ga4gh-discovery/ga4gh-search-compliance | 58c693ca2f96d145f4ccba08aec23e4ebe1f7599 | [
"Apache-2.0"
] | null | null | null | ga4gh/search/compliance/util/local_server.py | ga4gh-discovery/ga4gh-search-compliance | 58c693ca2f96d145f4ccba08aec23e4ebe1f7599 | [
"Apache-2.0"
] | null | null | null | ga4gh/search/compliance/util/local_server.py | ga4gh-discovery/ga4gh-search-compliance | 58c693ca2f96d145f4ccba08aec23e4ebe1f7599 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Module compliance_suite.report_server.py
This module contains class definition of small web server utility. Serves final
report results as HTML.
"""
import datetime
import time
import http.server
import socketserver
import os
import logging
import inspect
import socket
import webbrowser
import shutil
import sys
import threading
import json
import jinja2 as j2
import ga4gh.search.compliance as pkg_dir
from ga4gh.search.compliance.config.configuration import Configuration
def capitalize(text):
    """Capitalize the first character of a word, for use in rendering template.

    Unlike :meth:`str.capitalize`, the remainder of the word is left
    untouched (``"gaGH"`` becomes ``"GaGH"``, not ``"Gagh"``).

    Args:
        text (str): word to capitalize

    Returns:
        capitalized (str): capitalized word; an empty string is returned
        unchanged instead of raising ``IndexError``
    """
    # Guard against empty input: text[0] would raise IndexError.
    if not text:
        return text
    return text[0].upper() + text[1:]
| 33.136187 | 83 | 0.516557 |
48bc7c9db7dabf6628ee230ef0c1f45b6794af0d | 2,146 | py | Python | api/routefinder.py | shingkid/DrWatson-ToTheRescue_SCDFXIBM | 009d2b4599b276ea760dbd888718a25332893075 | [
"MIT"
] | 1 | 2020-06-12T10:24:31.000Z | 2020-06-12T10:24:31.000Z | api/routefinder.py | yankai364/Dr-Watson | 22bd885d028e118fa5abf5a9d0ea373b7020ca1d | [
"MIT"
] | 3 | 2020-09-24T15:36:33.000Z | 2022-02-10T02:32:42.000Z | api/routefinder.py | shingkid/DrWatson-ToTheRescue_SCDFXIBM | 009d2b4599b276ea760dbd888718a25332893075 | [
"MIT"
] | 1 | 2020-06-14T10:09:58.000Z | 2020-06-14T10:09:58.000Z | import csv
import pandas as pd
import numpy as np
import networkx as nx
| 24.11236 | 83 | 0.547996 |
48bd4369d1643a3a728218455de1ea42bfd683e8 | 8,084 | py | Python | lectures/extensions/hyperbolic_discounting/replication_code/.mywaflib/waflib/extras/rst.py | loikein/ekw-lectures | a2f5436f10515ab26eab323fca8c37c91bdc5dcd | [
"MIT"
] | 4 | 2019-11-15T15:21:27.000Z | 2020-07-08T15:04:30.000Z | lectures/extensions/hyperbolic_discounting/replication_code/.mywaflib/waflib/extras/rst.py | loikein/ekw-lectures | a2f5436f10515ab26eab323fca8c37c91bdc5dcd | [
"MIT"
] | 9 | 2019-11-18T15:54:36.000Z | 2020-07-14T13:56:53.000Z | lectures/extensions/hyperbolic_discounting/replication_code/.mywaflib/waflib/extras/rst.py | loikein/ekw-lectures | a2f5436f10515ab26eab323fca8c37c91bdc5dcd | [
"MIT"
] | 3 | 2021-01-25T15:41:30.000Z | 2021-09-21T08:51:36.000Z | #!/usr/bin/env python
# Jrme Carretero, 2013 (zougloub)
"""
reStructuredText support (experimental)
Example::
def configure(conf):
conf.load('rst')
if not conf.env.RST2HTML:
conf.fatal('The program rst2html is required')
def build(bld):
bld(
features = 'rst',
type = 'rst2html', # rst2html, rst2pdf, ...
source = 'index.rst', # mandatory, the source
deps = 'image.png', # to give additional non-trivial dependencies
)
By default the tool looks for a set of programs in PATH.
The tools are defined in `rst_progs`.
To configure with a special program use::
$ RST2HTML=/path/to/rst2html waf configure
This tool is experimental; don't hesitate to contribute to it.
"""
import re
from waflib import Errors
from waflib import Logs
from waflib import Node
from waflib import Task
from waflib import Utils
from waflib.TaskGen import before_method
from waflib.TaskGen import feature
# Names of the docutils front-end programs this tool can drive.
rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()


def configure(self):
    """
    Look for each of the docutils front-end programs on PATH.

    A missing program is not an error at this point: results end up in
    ``conf.env`` and a project's own configure() is responsible for
    aborting if a tool it needs was not found.
    """
    for prog in rst_progs:
        self.find_program(prog, mandatory=False)
| 29.289855 | 114 | 0.593147 |
48bf7ed2085cdea54fbe4837b4e8e76a67b7373c | 1,339 | py | Python | mofa/analytics/tests/test_participationAnalytics/test_quizParticipation.py | BoxInABoxICT/BoxPlugin | ad351978faa37ab867a86d2f4023a2b3e5a2ce19 | [
"Apache-2.0"
] | null | null | null | mofa/analytics/tests/test_participationAnalytics/test_quizParticipation.py | BoxInABoxICT/BoxPlugin | ad351978faa37ab867a86d2f4023a2b3e5a2ce19 | [
"Apache-2.0"
] | null | null | null | mofa/analytics/tests/test_participationAnalytics/test_quizParticipation.py | BoxInABoxICT/BoxPlugin | ad351978faa37ab867a86d2f4023a2b3e5a2ce19 | [
"Apache-2.0"
] | null | null | null | import unittest
import json
import os
from unittest.mock import MagicMock, patch
from analytics.src.participationAnalytics import quizParticipation
| 32.658537 | 77 | 0.657207 |
48bf9da5843cf6858ec4f1074f331bc92553a1cd | 1,171 | py | Python | visualize_cam.py | mhamdan91/Gradcam_eager | ee732ff65256ef1692caf94c8c0b4bdbe22d2d1d | [
"MIT"
] | 2 | 2019-09-19T18:08:26.000Z | 2019-10-11T12:42:22.000Z | visualize_cam.py | mhamdan91/Gradcam_eager | ee732ff65256ef1692caf94c8c0b4bdbe22d2d1d | [
"MIT"
] | null | null | null | visualize_cam.py | mhamdan91/Gradcam_eager | ee732ff65256ef1692caf94c8c0b4bdbe22d2d1d | [
"MIT"
] | null | null | null | # from utils import Sample_main
import gradcam_main
import numpy as np
import tensorflow as tf
import argparse
import os
tf.logging.set_verbosity(tf.logging.ERROR) # disable to see tensorflow warnings
# In case referenced by other modules
if __name__ == '__main__':
main()
| 34.441176 | 150 | 0.64304 |
48bfa6a9870aa2f95044df7a3145739de4a0dc15 | 1,681 | py | Python | tests/molecular/molecules/building_block/test_with_functional_groups.py | andrewtarzia/stk | 1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a | [
"MIT"
] | 21 | 2018-04-12T16:25:24.000Z | 2022-02-14T23:05:43.000Z | tests/molecular/molecules/building_block/test_with_functional_groups.py | JelfsMaterialsGroup/stk | 0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2 | [
"MIT"
] | 8 | 2019-03-19T12:36:36.000Z | 2020-11-11T12:46:00.000Z | tests/molecular/molecules/building_block/test_with_functional_groups.py | supramolecular-toolkit/stk | 0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2 | [
"MIT"
] | 5 | 2018-08-07T13:00:16.000Z | 2021-11-01T00:55:10.000Z | from ..utilities import (
has_same_structure,
is_equivalent_molecule,
is_equivalent_building_block,
are_equivalent_functional_groups,
)
def test_with_functional_groups(building_block, get_functional_groups):
"""
Test :meth:`.BuildingBlock.with_functional_groups`.
Parameters
----------
building_block : :class:`.BuildingBlock`
The building block to test.
get_functional_groups : :class:`callable`
Takes a single parameter, `building_block` and returns the
`functional_groups` parameter to use for this test.
Returns
-------
None : :class:`NoneType`
"""
# Save clone to check immutability.
clone = building_block.clone()
_test_with_functional_groups(
building_block=building_block,
functional_groups=tuple(get_functional_groups(building_block)),
)
is_equivalent_building_block(building_block, clone)
has_same_structure(building_block, clone)
def _test_with_functional_groups(building_block, functional_groups):
    """
    Check a single application of ``with_functional_groups``.

    Parameters
    ----------
    building_block : :class:`.BuildingBlock`
        The building block to test.

    functional_groups : :class:`tuple` of :class:`.FunctionalGroup`
        The functional groups the new building block should hold.

    Returns
    -------
    None : :class:`NoneType`

    """
    result = building_block.with_functional_groups(functional_groups)
    # The result carries exactly the requested groups ...
    are_equivalent_functional_groups(
        result.get_functional_groups(),
        functional_groups,
    )
    # ... but is otherwise the same molecule with the same structure.
    is_equivalent_molecule(building_block, result)
    has_same_structure(building_block, result)
| 26.68254 | 71 | 0.702558 |
48bfa7e063bfbe3193516ebfca7f4a3ae8dc8a0a | 9,512 | py | Python | tests/master/test_master.py | bk-mtg/piwheels | 67152dd1cfd5bd03ea90a8f0255103a9ee9c71d6 | [
"BSD-3-Clause"
] | null | null | null | tests/master/test_master.py | bk-mtg/piwheels | 67152dd1cfd5bd03ea90a8f0255103a9ee9c71d6 | [
"BSD-3-Clause"
] | null | null | null | tests/master/test_master.py | bk-mtg/piwheels | 67152dd1cfd5bd03ea90a8f0255103a9ee9c71d6 | [
"BSD-3-Clause"
] | null | null | null | # The piwheels project
# Copyright (c) 2017 Ben Nuttall <https://github.com/bennuttall>
# Copyright (c) 2017 Dave Jones <dave@waveform.org.uk>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
from unittest import mock
from threading import Thread
import pytest
from conftest import find_message
from piwheels import __version__, protocols, transport
from piwheels.master import main, const
def test_help(capsys):
    """``--help`` exits and prints a usage message listing master options."""
    with pytest.raises(SystemExit):
        main(['--help'])
    out, err = capsys.readouterr()
    assert out.startswith('usage:')
    # Spot-check that a master-specific option made it into the help text
    assert '--pypi-xmlrpc' in out
def test_version(capsys):
    """``--version`` exits and prints exactly the package version."""
    with pytest.raises(SystemExit):
        main(['--version'])
    out, err = capsys.readouterr()
    assert out.strip() == __version__
def test_no_root(caplog):
    """Running the master as root (euid 0) aborts with a clear log message."""
    with mock.patch('os.geteuid') as geteuid:
        geteuid.return_value = 0  # pretend to be root
        assert main([]) != 0
        assert find_message(caplog.records,
                            message='Master must not be run as root')
def test_quit_control(mock_systemd, master_thread, master_control):
    """A QUIT message on the control queue shuts the master down cleanly."""
    thread = master_thread()
    thread.start()
    # Wait for the master to signal readiness before sending commands
    assert mock_systemd._ready.wait(10)
    master_control.send_msg('QUIT')
    thread.join(10)
    assert not thread.is_alive()
def test_system_exit(mock_systemd, master_thread, caplog):
    """SystemExit raised inside the main loop produces a SIGTERM-style shutdown."""
    with mock.patch('piwheels.master.PiWheelsMaster.main_loop') as main_loop:
        main_loop.side_effect = SystemExit(1)
        thread = master_thread()
        thread.start()
        assert mock_systemd._ready.wait(10)
        thread.join(10)
        assert not thread.is_alive()
        assert find_message(caplog.records, message='shutting down on SIGTERM')
def test_system_ctrl_c(mock_systemd, master_thread, caplog):
    """KeyboardInterrupt in the main loop is logged as a Ctrl+C shutdown."""
    with mock.patch('piwheels.master.PiWheelsMaster.main_loop') as main_loop:
        main_loop.side_effect = KeyboardInterrupt()
        thread = master_thread()
        thread.start()
        assert mock_systemd._ready.wait(10)
        thread.join(10)
        assert not thread.is_alive()
        assert find_message(caplog.records, message='shutting down on Ctrl+C')
def test_bad_control(mock_systemd, master_thread, master_control, caplog):
    """Undecodable data on the control queue is logged and ignored, not fatal."""
    thread = master_thread()
    thread.start()
    assert mock_systemd._ready.wait(10)
    master_control.send(b'FOO')  # raw bytes, not a valid serialized message
    master_control.send_msg('QUIT')
    thread.join(10)
    assert not thread.is_alive()
    assert find_message(caplog.records, message='unable to deserialize data')
def test_status_passthru(tmpdir, mock_context, mock_systemd, master_thread):
    """Messages pushed to the internal status queue are re-published to
    external monitor subscribers unchanged."""
    with mock_context().socket(transport.PUSH, protocol=protocols.monitor_stats) as int_status, \
            mock_context().socket(transport.SUB, protocol=reversed(protocols.monitor_stats)) as ext_status:
        ext_status.connect('ipc://' + str(tmpdir.join('status-queue')))
        ext_status.subscribe('')
        thread = master_thread()
        thread.start()
        assert mock_systemd._ready.wait(10)
        # Wait for the first statistics message (from BigBrother) to get the
        # SUB queue working
        msg, data = ext_status.recv_msg()
        assert msg == 'STATS'
        # Mutate a field so the passed-thru copy is distinguishable from
        # the periodic stats BigBrother emits on its own
        data['builds_count'] = 12345
        int_status.connect(const.INT_STATUS_QUEUE)
        int_status.send_msg('STATS', data)
        # Try several times to read the passed-thru message; other messages
        # (like stats from BigBrother) will be sent to ext-status too
        for i in range(3):
            msg, copy = ext_status.recv_msg()
            if msg == 'STATS':
                assert copy == data
                break
        else:
            assert False, "Didn't see modified STATS passed-thru"
def test_kill_control(mock_systemd, master_thread, master_control):
    """A KILL control message is dispatched to SlaveDriver.kill_slave."""
    with mock.patch('piwheels.master.SlaveDriver.kill_slave') as kill_slave:
        thread = master_thread()
        thread.start()
        assert mock_systemd._ready.wait(10)
        master_control.send_msg('KILL', 1)  # ask to kill slave id 1
        master_control.send_msg('QUIT')
        thread.join(10)
        assert not thread.is_alive()
        assert kill_slave.call_args == mock.call(1)
def test_pause_resume(mock_systemd, master_thread, master_control, caplog):
    """PAUSE and RESUME control messages are acknowledged in the log."""
    thread = master_thread()
    thread.start()
    assert mock_systemd._ready.wait(10)
    master_control.send_msg('PAUSE')
    master_control.send_msg('RESUME')
    master_control.send_msg('QUIT')
    thread.join(10)
    assert not thread.is_alive()
    assert find_message(caplog.records, message='pausing operations')
    assert find_message(caplog.records, message='resuming operations')
def test_new_monitor(mock_systemd, master_thread, master_control, caplog):
    """A HELLO from a new monitor triggers a full slave-status dump."""
    with mock.patch('piwheels.master.SlaveDriver.list_slaves') as list_slaves:
        thread = master_thread()
        thread.start()
        assert mock_systemd._ready.wait(10)
        master_control.send_msg('HELLO')
        master_control.send_msg('QUIT')
        thread.join(10)
        assert not thread.is_alive()
        assert find_message(caplog.records,
                            message='sending status to new monitor')
        assert list_slaves.call_args == mock.call()
def test_debug(mock_systemd, master_thread, master_control, caplog):
    """--debug enables DEBUG-level message tracing for the named tasks only."""
    thread = master_thread(args=['--debug', 'master.the_scribe',
                                 '--debug', 'master.the_architect'])
    thread.start()
    assert mock_systemd._ready.wait(10)
    master_control.send_msg('QUIT')
    thread.join(10)
    assert not thread.is_alive()
    # Both named tasks should have logged the incoming QUIT at DEBUG level
    assert find_message(caplog.records, name='master.the_scribe',
                        levelname='DEBUG', message='<< QUIT None')
    assert find_message(caplog.records, name='master.the_architect',
                        levelname='DEBUG', message='<< QUIT None')
| 38.983607 | 107 | 0.678616 |
48c0042b454fab2f52a5d4277d95dcb8ccdc7da6 | 1,254 | py | Python | dibase/rpi/gpio/test/pinid-platformtests.py | ralph-mcardell/dibase-rpi-python | 724c18d1f3c6745b3dddf582ea2272ed4e2df8ac | [
"BSD-3-Clause"
] | null | null | null | dibase/rpi/gpio/test/pinid-platformtests.py | ralph-mcardell/dibase-rpi-python | 724c18d1f3c6745b3dddf582ea2272ed4e2df8ac | [
"BSD-3-Clause"
] | null | null | null | dibase/rpi/gpio/test/pinid-platformtests.py | ralph-mcardell/dibase-rpi-python | 724c18d1f3c6745b3dddf582ea2272ed4e2df8ac | [
"BSD-3-Clause"
] | null | null | null | '''
Part of the dibase.rpi.gpio.test package.
GPIO pin id support classes' platform tests.
Underlying GPIO pin ids are those used by the Linux gpiolib and used
to identify a device's GPIO pins in the Linux sys filesystem GPIO
sub-tree.
Developed by R.E. McArdell / Dibase Limited.
Copyright (c) 2012 Dibase Limited
License: dual: GPL or BSD.
'''
import unittest
import sys
if __name__ == '__main__':
# Add path to directory containing the dibase package directory
sys.path.insert(0, './../../../..')
from dibase.rpi.gpio import pinid
if __name__ == '__main__':
unittest.main()
| 33.891892 | 83 | 0.719298 |
48c34cc81742111643982bd0d218ec0140e5a1a0 | 7,503 | py | Python | analysis.py | tj294/2.5D-RB | f72f79d349ff27a058f503ccca58d63babb298e2 | [
"MIT"
] | null | null | null | analysis.py | tj294/2.5D-RB | f72f79d349ff27a058f503ccca58d63babb298e2 | [
"MIT"
] | null | null | null | analysis.py | tj294/2.5D-RB | f72f79d349ff27a058f503ccca58d63babb298e2 | [
"MIT"
] | null | null | null | """
Analysis code for plotting vertical flux transport and/or a gif of temperature,
velocity and KE from the merged output of a Dedalus Rayleigh-Brnard code.
Author: Tom Joshi-Cale
"""
# ====================
# IMPORTS
# ====================
import numpy as np
import h5py
import argparse
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import pathlib
import os
import shutil
import time
import imageio
from dedalus import public as de
from dedalus.tools import post
# ====================
# CLA PARSING
# ====================
# Command-line interface: pick which plots to produce from the merged output.
parser = argparse.ArgumentParser()
parser.add_argument(
    "-i", "--input", help="Folder where the processing data is stored", required=True
)
parser.add_argument(
    "-t", "--heatmap", help="Plot a gif of the temperature heatmap", action="store_true"
)
parser.add_argument(
    "-f", "--flux", help="Plot the average flux contributions", action="store_true"
)
parser.add_argument(
    "-k", "--KE", help="Plot the kinetic energy only", action="store_true"
)
args = parser.parse_args()
direc = os.path.normpath(args.input) + "/"
# Read the aspect ratio 'a' recorded by the simulation run.
with h5py.File(direc + "run_params/run_params_s1.h5", "r") as f:
    a = int(np.array(f["tasks"]["a"]))
# Rebuild the Dedalus collocation grids used by the run: Fourier in the
# horizontal (periodic, width a) and Chebyshev in the vertical (0..1).
# NOTE(review): resolution (256 x 64) is hard-coded here -- it must match
# the run that produced the snapshots; confirm against the solver script.
y = de.Fourier("y", 256, interval=(0, a), dealias=3 / 2)
z = de.Chebyshev("z", 64, interval=(0, 1), dealias=3 / 2)
# Replace the basis objects with plain coordinate arrays for plotting.
y = np.array(y.grid(1))
z = np.array(z.grid(1))
# ====================
# Plot Fluxes
# ====================
if args.flux:
    # Time window over which to average the flux profiles (interactive).
    avg_t_start = float(input("Start average at: "))
    avg_t_stop = float(input("End average at: "))
    with h5py.File(direc + "analysis/analysis_s1.h5", "r") as file:
        # [:, 0] drops the singleton horizontal axis of the averaged profiles
        L_cond_arr = np.array(file["tasks"]["L_cond"])[:, 0]
        L_conv_arr = np.array(file["tasks"]["L_conv"])[:, 0]
        KE = np.array(file["tasks"]["KE"])[:, 0]
        snap_t = np.array(file["scales"]["sim_time"])
    if (
        (avg_t_start <= snap_t[0])
        or (avg_t_start >= snap_t[-1])
        or (avg_t_stop <= snap_t[0])
        or (avg_t_stop >= snap_t[-1])
    ):
        # NOTE(review): an out-of-range window only prints a warning and
        # carries on (argmin below clamps to the nearest snapshot) --
        # confirm this best-effort behaviour is intended.
        print(
            "Average time period out of simulation range: {} -> {}".format(
                snap_t[0], snap_t[-1]
            )
        )
        pass
    # Indices of the snapshots closest to the requested window edges.
    ASI = np.abs(snap_t - avg_t_start).argmin()
    if np.isnan(avg_t_stop):
        AEI = -1  # "nan" sentinel: average to the final snapshot
    else:
        AEI = np.abs(snap_t - avg_t_stop).argmin()
    avg_t_range = snap_t[AEI] - snap_t[ASI]
    print("Averaging between {} and {}".format(snap_t[ASI], snap_t[AEI]))
    # Time-average the conductive and convective flux profiles over the window.
    mean_L_cond = np.mean(np.array(L_cond_arr[ASI:AEI]), axis=0)
    mean_L_conv = np.mean(np.array(L_conv_arr[ASI:AEI]), axis=0)
    mean_L_tot = mean_L_cond + mean_L_conv
    # Worst deviation of the total flux from the steady-state value of 1.
    del_L = np.max(np.abs(1.0 - mean_L_tot))
    print("max del_L = {}".format(del_L))
    # Upper panel: KE time series with the averaging window shaded.
    fig = plt.figure(figsize=(6, 6))
    KE_ax = fig.add_subplot(311)
    KE_ax.plot(snap_t, KE, "k", label="Kinetic Energy")
    KE_ax.set_xlabel(r"time [$\tau_\kappa$]")
    KE_ax.set_ylabel("KE")
    KE_ax.axvspan(
        snap_t[ASI], snap_t[AEI], color="r", alpha=0.5, label="Flux averaging"
    )
    # Lower panel: the averaged flux profiles against depth z.
    L_ax = fig.add_subplot(212)
    L_ax.plot(z, mean_L_cond, "r", linestyle="-", label=r"$L_{cond}$")
    L_ax.plot(z, mean_L_conv, "g", linestyle="-", label=r"$L_{conv}$")
    L_ax.plot(z, mean_L_tot, "k", ls="-", label=r"$L_{total}$")
    L_ax.set_xlabel("z")
    L_ax.set_ylabel("L")
    L_ax.legend()
    plt.savefig(direc + "fluxes.png")
    plt.show()
    plt.close()
# ====================
# Plot heatmap
# ====================
if args.heatmap:
    # Render one frame per snapshot (T, v, w contours plus KE history),
    # then stitch the frames into a gif and delete the raw images.
    filenames = []
    os.makedirs(direc + "figure", exist_ok=True)
    with h5py.File(direc + "analysis/analysis_s1.h5", "r") as file:
        KE = np.array(file["tasks"]["KE"])[:, 0]
    with h5py.File(direc + "snapshots/snapshots_s1.h5", "r") as file:
        T = np.array(file["tasks"]["T"])
        v = np.array(file["tasks"]["v"])
        w = np.array(file["tasks"]["w"])
        snap_t = np.array(file["scales"]["sim_time"])
        snap_iter = np.array(file["scales"]["iteration"])
    yy, zz = np.meshgrid(y, z)
    # Global maxima fix the colour scales so they are consistent across frames.
    maxT = np.max(T)
    maxV = np.max(v)
    maxW = np.max(w)
    n_iter = len(T[:, 0:, 0])  # number of snapshots (frames)
    start_time = time.time()
    print("Plotting {} graphs".format(n_iter))
    try:
        for i in range(0, int(n_iter)):
            # Layout: T spans the left column, v/w stacked on the right,
            # KE history along the bottom row.
            fig = plt.figure(figsize=(8, 6))
            gs = gridspec.GridSpec(ncols=2, nrows=3, figure=fig)
            T_ax = fig.add_subplot(gs[0:2, 0])
            v_ax = fig.add_subplot(gs[0, 1])
            w_ax = fig.add_subplot(gs[1, 1])
            KE_ax = fig.add_subplot(gs[2, :])
            # Progress/ETA report every 50 frames
            if (i % 50 == 0) and (i != 0):
                sec_per_frame = (time.time() - start_time) / i
                eta = sec_per_frame * (n_iter - i)
                print(
                    "image {}/{} at {:.3f}ips \t| ETA in {}m {}s".format(
                        i, n_iter, sec_per_frame, int(eta // 60), int(eta % 60)
                    )
                )
            fig.suptitle(
                "Iteration: {}\n".format(snap_iter[i])
                + r"Sim Time: {:.2f} $\tau_\kappa$".format(snap_t[i])
            )
            # Horizontal velocity v
            c1 = v_ax.contourf(
                yy,
                zz,
                np.transpose(v[i, :, :]),
                levels=np.linspace(np.min(v), maxV),
                cmap="coolwarm",
            )
            c1_bar = fig.colorbar(c1, ax=v_ax)
            c1_bar.set_label("v", rotation=0)
            v_ax.set_ylabel("z")
            v_ax.set_xlabel("y")
            v_ax.invert_xaxis()
            # Vertical velocity w
            c2 = w_ax.contourf(
                yy,
                zz,
                np.transpose(w[i, :, :]),
                levels=np.linspace(np.min(w), maxW),
                cmap="coolwarm",
            )
            c2_bar = fig.colorbar(c2, ax=w_ax)
            c2_bar.set_label("w", rotation=0)
            w_ax.set_ylabel("z")
            w_ax.set_xlabel("y")
            w_ax.invert_xaxis()
            # Temperature T (colour scale anchored at 0)
            c3 = T_ax.contourf(
                yy,
                zz,
                np.transpose(T[i, :, :]),
                levels=np.linspace(0, maxT),
                cmap="coolwarm",
            )
            c3_bar = fig.colorbar(c3, ax=T_ax)
            c3_bar.set_label("T", rotation=0)
            T_ax.set_ylabel("z")
            T_ax.set_xlabel("y")
            T_ax.invert_xaxis()
            # KE history up to the current frame
            KE_ax.plot(snap_t[:i], KE[:i], "k")
            KE_ax.set_xlabel(r"time [$\tau_\kappa$]")
            KE_ax.set_ylabel("KE")
            KE_ax.set_ylim([0, 1.1 * np.max(KE)])
            KE_ax.set_xlim([0, np.max(snap_t)])
            plt.tight_layout()
            plt.savefig(direc + "figure/fig_{:03d}.png".format(i))
            filenames.append(direc + "figure/fig_{:03d}.png".format(i))
            plt.close()
            plt.clf()
    except KeyboardInterrupt:
        # Ctrl+C stops rendering early; the gif is still built from the
        # frames produced so far.
        print("ending loop")
    print("completed in {:.2f} sec".format(time.time() - start_time))
    print("Creating gif...")
    with imageio.get_writer(direc + "info.gif", mode="I") as writer:
        for filename in filenames:
            image = imageio.imread(filename)
            writer.append_data(image)
    print("Removing raw image files...")
    shutil.rmtree(direc + "figure")
if args.KE:
    # Quick-look plot of the kinetic-energy time series only.
    with h5py.File(direc + "analysis/analysis_s1.h5", "r") as f:
        KE = np.array(f["tasks"]["KE"])[:, 0]
        snap_t = np.array(f["scales"]["sim_time"])
    fig = plt.figure(figsize=(6, 4))
    ax = fig.add_subplot(111)
    ax.plot(snap_t, KE, "k")
    ax.set_xlabel(r"time [$\tau_\kappa$]")
    ax.set_ylabel("KE")
    plt.show()
    plt.close()
print("done.")
| 31.52521 | 88 | 0.53432 |
48c388d2a91f2301d0f59df1f50eb64349cced6a | 2,104 | py | Python | direct_gd_predict/hash-profile.py | wac/meshop | ea5703147006e5e85617af897e1d1488e6f29f32 | [
"0BSD"
] | 1 | 2016-05-08T14:54:31.000Z | 2016-05-08T14:54:31.000Z | direct_gd_predict/hash-profile.py | wac/meshop | ea5703147006e5e85617af897e1d1488e6f29f32 | [
"0BSD"
] | null | null | null | direct_gd_predict/hash-profile.py | wac/meshop | ea5703147006e5e85617af897e1d1488e6f29f32 | [
"0BSD"
] | null | null | null | import sys
import heapq
import optparse
from bitcount2 import bitcount
# NOTE: this is Python 2 code (print statements below).
# Reads a reference profile file (feature|score per line), then streams
# key-grouped records from stdin and emits one hash summary line per key.
hasher={}
profile={}  # NOTE(review): appears unused in this section -- confirm
key_list=[]
key_col=0
sep='|'  # field separator for both the profile file and stdin records
key_col=0  # NOTE(review): duplicate assignment of key_col above
#feature_col=1
#score_col=6
# Column layout of the reference profile file:
in_feature_col=0
in_score_col=1
# Column layout of the records arriving on stdin:
process_feature_col=1
process_score_col=6
parser = optparse.OptionParser()
#parser.add_option("-n", dest="heapsize",
#                  default=50, action="store", type="int")
#parser.add_option("-R", "--random", dest="use_random",
#                  default=False, action="store_true")
(options, args) = parser.parse_args(sys.argv)
if (len(args) > 1):
    profile_filename=args[1]
else:
    usage()  # usage() is defined elsewhere in the original file
# Load the reference profile: feature -> score, preserving feature order.
for line in open(profile_filename):
    if line[0]=='#':
        continue  # skip comment lines
    tuples=line.strip().split(sep)
    key=tuples[in_feature_col]
    key_list.append(key)
    hasher[key]=tuples[in_score_col]
# Stream stdin, accumulating feature->score pairs per key; when the key
# changes, hash the accumulated profile and print a summary line.
# Assumes stdin is sorted/grouped by key -- TODO confirm upstream contract.
curr_profile={}
old_key=""
for line in sys.stdin:
    line=line.strip()
    if line[0]=='#':
        print line  # pass comment lines through unchanged
        continue
    tuples=line.split(sep)
    curr_key=tuples[key_col]
    if not old_key:
        old_key=curr_key
    if not old_key==curr_key:
        # Key changed: emit the hash for the completed profile.
        # do_hash() (defined elsewhere) returns a bit-string; bitcount
        # counts the set bits of its integer value.
        hashval=do_hash(hasher, curr_profile, key_list)
        hashval_int=int(hashval, 2)
        print old_key+sep+hashval+sep+str(hashval_int)+sep+str(bitcount(hashval_int))
        curr_profile={}
        old_key=curr_key
    curr_profile[tuples[process_feature_col]]=tuples[process_score_col]
# Flush the final key's profile after EOF.
hashval=do_hash(hasher, curr_profile, key_list)
hashval_int=int(hashval, 2)
print old_key+sep+hashval+sep+str(hashval_int)+sep+str(bitcount(hashval_int))
| 23.120879 | 85 | 0.65827 |
48c5df022af8f3cc4834d472772e95a600e0b3cc | 3,804 | py | Python | sdk/eventgrid/azure-eventgrid/azure/eventgrid/aio/_publisher_client_async.py | conniey/azure-sdk-for-python | f779de8e53dbec033f98f976284e6d9491fd60b3 | [
"MIT"
] | 2 | 2019-05-17T21:24:53.000Z | 2020-02-12T11:13:42.000Z | sdk/eventgrid/azure-eventgrid/azure/eventgrid/aio/_publisher_client_async.py | conniey/azure-sdk-for-python | f779de8e53dbec033f98f976284e6d9491fd60b3 | [
"MIT"
] | null | null | null | sdk/eventgrid/azure-eventgrid/azure/eventgrid/aio/_publisher_client_async.py | conniey/azure-sdk-for-python | f779de8e53dbec033f98f976284e6d9491fd60b3 | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core import AsyncPipelineClient
from msrest import Deserializer, Serializer
from .._models import CloudEvent, EventGridEvent, CustomEvent
from .._helpers import _get_topic_hostname_only_fqdn, _get_authentication_policy, _is_cloud_event
from azure.core.pipeline.policies import AzureKeyCredentialPolicy
from azure.core.credentials import AzureKeyCredential
from .._generated.aio import EventGridPublisherClient as EventGridPublisherClientAsync
from .. import _constants as constants
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Union, Dict, List
    # Every payload shape the publisher accepts for sending: a single
    # event (CloudEvent / EventGridEvent / CustomEvent / raw dict) or a
    # list of any of those.
    SendType = Union[
        CloudEvent,
        EventGridEvent,
        CustomEvent,
        Dict,
        List[CloudEvent],
        List[EventGridEvent],
        List[CustomEvent],
        List[Dict]
    ]
| 49.402597 | 137 | 0.701367 |
48c82cf824c0f047f355b4e4cd11359596e54a76 | 5,037 | py | Python | multi_tool.py | zbigos/multi_project_tools | cb9996d0fea0c2c763054ad5f78e904a68b9c80e | [
"Apache-2.0"
] | null | null | null | multi_tool.py | zbigos/multi_project_tools | cb9996d0fea0c2c763054ad5f78e904a68b9c80e | [
"Apache-2.0"
] | null | null | null | multi_tool.py | zbigos/multi_project_tools | cb9996d0fea0c2c763054ad5f78e904a68b9c80e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import logging, sys, argparse

from collect import Collection

if __name__ == '__main__':
    # Command-line front end for the multi-project tools: every flag either
    # selects a check to run or an artefact to generate, and the Collection
    # object does the actual work based on the parsed arguments.
    parser = argparse.ArgumentParser(description="test a project repo")
    parser.add_argument('--force-delete', help='instead of aborting on existing files, delete them', action='store_const', const=True)
    # NOTE(review): subparsers is created but never populated or referenced --
    # confirm whether subcommand support is still planned before removing.
    subparsers = parser.add_subparsers(help='help for subcommand', dest="command")
    parser.add_argument('--config', help="the config file listing all project directories", default='projects.yaml')
    parser.add_argument('--local-config', help="the local environment config file", default='local.yaml')
    parser.add_argument('--project', help="just run for a single project, supply project ID", type=int)

    # per-project checks
    parser.add_argument('--test-module', help="run the module's test", action='store_const', const=True)
    parser.add_argument('--prove-wrapper', help="check the wrapper proof", action='store_const', const=True)
    parser.add_argument('--test-caravel', help="check the caravel test", action='store_const', const=True)
    parser.add_argument('--test-gds', help="check the gds", action='store_const', const=True)
    parser.add_argument('--test-lvs', help="check the gds against powered verilog", action='store_const', const=True)
    parser.add_argument('--test-tristate-z', help="check outputs are z when not active", action='store_const', const=True)
    parser.add_argument('--test-ports', help="check ports defined in yaml match the verilog", action='store_const', const=True)
    parser.add_argument('--test-git', help="check gitsha on disk matches the config", action='store_const', const=True)
    parser.add_argument('--test-all', help="run all the checks for each project", action='store_const', const=True)
    parser.add_argument('--test-from', help="run all the checks for all projects with id equal or more than the given id", type=int)

    # repo management and artefact generation
    parser.add_argument('--openram', help="use OpenRAM - instantiate the bridge, wrapper and do the wiring", action='store_const', const=True)
    parser.add_argument('--clone-shared-repos', help="clone shared repos defined in projects.yaml", action='store_const', const=True)
    parser.add_argument('--clone-repos', help="git clone the repo", action='store_const', const=True)
    parser.add_argument('--create-openlane-config', help="create the OpenLane & caravel_user_project config", action='store_const', const=True)
    parser.add_argument('--gate-level', help="create the caravel includes file with gate level includes", action='store_const', const=True)
    parser.add_argument('--copy-project', help="copy project's RTL and tests to correct locations in caravel_user_project", action='store_const', const=True)
    parser.add_argument('--copy-gds', help="copy the projects GDS and LEF files", action='store_const', const=True)
    parser.add_argument('--generate-doc', help="generate a index.md file with information about each project", action='store_const', const=True)
    parser.add_argument('--dump-hash', help="print current commit hash of each project along with author and title", action='store_const', const=True)
    parser.add_argument('--fill', help="for testing, repeat the given projects this number of times", type=int)
    parser.add_argument('--annotate-image', help="annotate the multi_macro.png image generated by klayout", action='store_const', const=True)
    parser.add_argument('--dump-macro-position', help="use the macro.cfg + gds to create a list of positions and sizes", action='store_const', const=True)
    parser.add_argument('--layout-tool', help="run the manual layout tool on current designs", action='store_const', const=True)
    parser.add_argument('--layout-tool-downscale', help="scale factor for layout tool", type=int)

    args = parser.parse_args()

    # setup log
    log_format = logging.Formatter('%(asctime)s - %(module)-15s - %(levelname)-8s - %(message)s')
    # configure the client logging
    log = logging.getLogger('')
    # has to be set to debug as is the root logger
    log.setLevel(logging.INFO)
    # create console handler and set level to info
    ch = logging.StreamHandler(sys.stdout)
    # create formatter for console
    ch.setFormatter(log_format)
    log.addHandler(ch)

    # Collection inspects args itself; the test flags are consumed inside
    # run_tests(), the generation flags are handled explicitly below.
    collection = Collection(args)

    # run any tests specified by arguments
    collection.run_tests()

    if args.layout_tool:
        collection.launch_layout_tool(args.layout_tool_downscale)

    # create all the OpenLane config for the user collection wrapper
    if args.create_openlane_config:
        collection.create_openlane_config()

    # copy gds to correct place
    if args.copy_gds:
        collection.copy_all_gds()

    if args.copy_project:
        collection.copy_all_project_files_to_caravel()

    # generate doc
    if args.generate_doc:
        collection.generate_docs()

    # image
    if args.annotate_image:
        collection.annotate_image()

    # dump macro pos - wip for assisted macro placement
    if args.dump_macro_position:
        collection.get_macro_pos()
| 59.258824 | 157 | 0.728608 |
48c98054b3a6ea0035473ed0534ee80b41dcebb8 | 4,327 | py | Python | benchbuild/projects/benchbuild/bots.py | ognarb/benchbuild | ad93ae0666e3100fd36c697793c0db1ba52938d0 | [
"MIT"
] | null | null | null | benchbuild/projects/benchbuild/bots.py | ognarb/benchbuild | ad93ae0666e3100fd36c697793c0db1ba52938d0 | [
"MIT"
] | null | null | null | benchbuild/projects/benchbuild/bots.py | ognarb/benchbuild | ad93ae0666e3100fd36c697793c0db1ba52938d0 | [
"MIT"
] | null | null | null | from plumbum import local
from benchbuild import project
from benchbuild.utils import compiler, download, run, wrapping
from benchbuild.utils.cmd import make, mkdir
| 28.655629 | 79 | 0.580079 |
48c9b882f54e25efdd1d54210cde93be6398663c | 668 | py | Python | clients/python/setup.py | timtadh/queued | 9c46a49a73103de9a929718c223326149cb9accd | [
"BSD-3-Clause"
] | 4 | 2015-12-29T05:07:50.000Z | 2022-02-10T20:27:40.000Z | clients/python/setup.py | timtadh/queued | 9c46a49a73103de9a929718c223326149cb9accd | [
"BSD-3-Clause"
] | 1 | 2015-04-16T15:56:26.000Z | 2015-04-16T15:56:26.000Z | clients/python/setup.py | timtadh/queued | 9c46a49a73103de9a929718c223326149cb9accd | [
"BSD-3-Clause"
try:
    from setuptools import setup
    setup  # quiet "redefinition of unused ..." warning from pyflakes
    # arguments that distutils doesn't understand
    setuptools_kwargs = {
        'install_requires': [
        ],
        'provides': ['queued'],
        'zip_safe': False
    }
except ImportError:
    # Fall back to plain distutils when setuptools is unavailable, and drop
    # the keyword arguments distutils does not understand.
    from distutils.core import setup
    setuptools_kwargs = {}

setup(name='queued',
      # Version metadata must be a string: packaging tools call str-only
      # APIs on it, and a float cannot express versions such as "1.10".
      version='1.1',
      description=(
        'A client for queued'
      ),
      author='Tim Henderson',
      author_email='tadh@case.edu',
      url='queued.org',
      packages=['queued',],
      platforms=['unix'],
      scripts=[],
      **setuptools_kwargs
)
| 23.034483 | 69 | 0.586826 |
48ca7f075d0516343cadcc4c408fff80c48e1083 | 11,129 | py | Python | sym_executor.py | zhangzhenghsy/fiber | af1a8c8b01d4935849df73b01ccfeccbba742205 | [
"BSD-2-Clause"
] | null | null | null | sym_executor.py | zhangzhenghsy/fiber | af1a8c8b01d4935849df73b01ccfeccbba742205 | [
"BSD-2-Clause"
] | null | null | null | sym_executor.py | zhangzhenghsy/fiber | af1a8c8b01d4935849df73b01ccfeccbba742205 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/python
import angr,simuvex
import sys,os
import time
from utils_sig import *
from sym_tracer import Sym_Tracer
from sig_recorder import Sig_Recorder
#This class is responsible for performing symbolic execution.
| 54.287805 | 243 | 0.673556 |
48ca956ecd40df08896e125936a630042abd2d96 | 2,228 | py | Python | apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/pymysql/constants/CR.py | tharindu1st/apim-migration-resources | dd68aa8c53cf310392bb72e699dd24c57b109cfb | [
"Apache-2.0"
] | 1,573 | 2015-01-01T07:19:06.000Z | 2022-03-30T09:06:06.000Z | apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/pymysql/constants/CR.py | tharindu1st/apim-migration-resources | dd68aa8c53cf310392bb72e699dd24c57b109cfb | [
"Apache-2.0"
] | 1,691 | 2015-01-03T11:03:23.000Z | 2022-03-30T07:27:28.000Z | apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/pymysql/constants/CR.py | tharindu1st/apim-migration-resources | dd68aa8c53cf310392bb72e699dd24c57b109cfb | [
"Apache-2.0"
] | 895 | 2015-01-03T19:56:15.000Z | 2022-03-18T18:30:57.000Z | # flake8: noqa
# errmsg.h
CR_ERROR_FIRST = 2000
CR_UNKNOWN_ERROR = 2000
CR_SOCKET_CREATE_ERROR = 2001
CR_CONNECTION_ERROR = 2002
CR_CONN_HOST_ERROR = 2003
CR_IPSOCK_ERROR = 2004
CR_UNKNOWN_HOST = 2005
CR_SERVER_GONE_ERROR = 2006
CR_VERSION_ERROR = 2007
CR_OUT_OF_MEMORY = 2008
CR_WRONG_HOST_INFO = 2009
CR_LOCALHOST_CONNECTION = 2010
CR_TCP_CONNECTION = 2011
CR_SERVER_HANDSHAKE_ERR = 2012
CR_SERVER_LOST = 2013
CR_COMMANDS_OUT_OF_SYNC = 2014
CR_NAMEDPIPE_CONNECTION = 2015
CR_NAMEDPIPEWAIT_ERROR = 2016
CR_NAMEDPIPEOPEN_ERROR = 2017
CR_NAMEDPIPESETSTATE_ERROR = 2018
CR_CANT_READ_CHARSET = 2019
CR_NET_PACKET_TOO_LARGE = 2020
CR_EMBEDDED_CONNECTION = 2021
CR_PROBE_SLAVE_STATUS = 2022
CR_PROBE_SLAVE_HOSTS = 2023
CR_PROBE_SLAVE_CONNECT = 2024
CR_PROBE_MASTER_CONNECT = 2025
CR_SSL_CONNECTION_ERROR = 2026
CR_MALFORMED_PACKET = 2027
CR_WRONG_LICENSE = 2028
CR_NULL_POINTER = 2029
CR_NO_PREPARE_STMT = 2030
CR_PARAMS_NOT_BOUND = 2031
CR_DATA_TRUNCATED = 2032
CR_NO_PARAMETERS_EXISTS = 2033
CR_INVALID_PARAMETER_NO = 2034
CR_INVALID_BUFFER_USE = 2035
CR_UNSUPPORTED_PARAM_TYPE = 2036
CR_SHARED_MEMORY_CONNECTION = 2037
CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
CR_SHARED_MEMORY_MAP_ERROR = 2043
CR_SHARED_MEMORY_EVENT_ERROR = 2044
CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
CR_CONN_UNKNOW_PROTOCOL = 2047
CR_INVALID_CONN_HANDLE = 2048
CR_SECURE_AUTH = 2049
CR_FETCH_CANCELED = 2050
CR_NO_DATA = 2051
CR_NO_STMT_METADATA = 2052
CR_NO_RESULT_SET = 2053
CR_NOT_IMPLEMENTED = 2054
CR_SERVER_LOST_EXTENDED = 2055
CR_STMT_CLOSED = 2056
CR_NEW_STMT_METADATA = 2057
CR_ALREADY_CONNECTED = 2058
CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
CR_DUPLICATE_CONNECTION_ATTR = 2060
CR_AUTH_PLUGIN_ERR = 2061
CR_ERROR_LAST = 2061
| 32.289855 | 47 | 0.745063 |
48cccf1158ee9bcd15cefd678338ca10d4234710 | 992 | py | Python | check-challenge.py | gjaiswal108/Check-if-Challenge-problem-added-in-codechef | 74b29725ad38bdf0dc210dbdb67fccf056ec6d8c | [
"Apache-2.0"
] | null | null | null | check-challenge.py | gjaiswal108/Check-if-Challenge-problem-added-in-codechef | 74b29725ad38bdf0dc210dbdb67fccf056ec6d8c | [
"Apache-2.0"
] | null | null | null | check-challenge.py | gjaiswal108/Check-if-Challenge-problem-added-in-codechef | 74b29725ad38bdf0dc210dbdb67fccf056ec6d8c | [
"Apache-2.0"
] | null | null | null | import requests,smtplib,time
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
# Poll the CodeChef JUNE19B contest page until a "(Challenge)" problem shows
# up, then send a one-off notification e-mail and exit.
while(1):
    # Fetch a fresh copy of the page on every iteration.  (Previously the
    # request was issued once before the loop, so every retry re-examined the
    # same stale response and could never observe the problem being added.)
    r = requests.get('https://www.codechef.com/JUNE19B/')
    if('(Challenge)' in r.text):
        s = smtplib.SMTP('smtp.gmail.com', 587)
        s.starttls()
        # Placeholder credentials -- replace with a real sender account.
        s.login("sender_gmail_id", "password")
        msg= MIMEMultipart("alternative")
        msg["Subject"]="Challenge Problem added"
        msg["From"]="sender_gmail_id"
        msg["To"]="receiver_gmail_id"
        text="I guess challenge problem is added in long challenge,check it on codechef."
        html="<h4>I guess challenge problem is added in long challenge,check it on codechef.</h4><br/><a href='https://www.codechef.com/'>Click here to visit. </a>"
        # Attach the plain-text part before the HTML part: in a
        # multipart/alternative message the last part is the preferred
        # rendering.  (The plain-text body was previously built but never
        # attached, so clients without HTML support got an empty message.)
        msg.attach(MIMEText(text, "plain"))
        msg.attach(MIMEText(html, "html"))
        s.sendmail("sender_gmail_id","receiver_gmail_id",msg.as_string())
        s.quit()
        print('sent')
        break
    print('Sleeping...')
    # Re-check at most once per hour.
    time.sleep(3600)
    print('Trying again...')
| 41.333333 | 164 | 0.647177 |
48cd5f2495ad481cbf1d4200796edf478513850e | 2,840 | py | Python | applications/tensorflow/click_through_rate/din/test/test_attention_fcn.py | kew96/GraphcoreExamples | 22dc0d7e3755b0a7f16cdf694c6d10c0f91ee8eb | [
"MIT"
] | null | null | null | applications/tensorflow/click_through_rate/din/test/test_attention_fcn.py | kew96/GraphcoreExamples | 22dc0d7e3755b0a7f16cdf694c6d10c0f91ee8eb | [
"MIT"
] | null | null | null | applications/tensorflow/click_through_rate/din/test/test_attention_fcn.py | kew96/GraphcoreExamples | 22dc0d7e3755b0a7f16cdf694c6d10c0f91ee8eb | [
"MIT"
] | null | null | null | # Copyright (c) 2020 Graphcore Ltd. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests covering attention used by the DIN model.
"""
import tensorflow as tf
import unittest
import pytest
import numpy as np
import sys
from pathlib import Path
# Add common module to path
common_path = Path(Path(__file__).absolute().parent.parent.parent)
sys.path.append(str(common_path))
from common.utils import din_attention
from din.din_model import DIN
seed = 3
tf.set_random_seed(seed)
| 34.216867 | 116 | 0.669366 |
48cd84239fff9070a94f62f2913b39c9eded80ea | 204 | py | Python | shiva/constants.py | tooxie/shiva-server | 4d169aae8d4cb01133f62701b14610695e48c297 | [
"MIT"
] | 70 | 2015-01-09T15:15:15.000Z | 2022-01-14T09:51:55.000Z | shiva/constants.py | tooxie/shiva-server | 4d169aae8d4cb01133f62701b14610695e48c297 | [
"MIT"
] | 14 | 2015-01-04T10:08:26.000Z | 2021-12-13T19:35:07.000Z | shiva/constants.py | tooxie/shiva-server | 4d169aae8d4cb01133f62701b14610695e48c297 | [
"MIT"
] | 19 | 2015-01-02T22:42:01.000Z | 2022-01-14T09:51:59.000Z | # -*- coding: utf-8 -*-
| 17 | 32 | 0.632353 |
48cf0852e6ab6d1a0771fbc1bfea6839386062de | 1,287 | py | Python | tests/test_cmdline.py | lin-zone/scrapyu | abcb8eed2ea02121b74017e007c57c0d3762342d | [
"MIT"
] | 1 | 2021-01-05T09:11:42.000Z | 2021-01-05T09:11:42.000Z | tests/test_cmdline.py | lin-zone/scrapyu | abcb8eed2ea02121b74017e007c57c0d3762342d | [
"MIT"
] | null | null | null | tests/test_cmdline.py | lin-zone/scrapyu | abcb8eed2ea02121b74017e007c57c0d3762342d | [
"MIT"
] | null | null | null | import sys
import subprocess
from tempfile import mkdtemp, TemporaryFile
from path import Path
from tests import TEST_DIR
args = (sys.executable, '-m', 'scrapyu.cmdline')
| 33.868421 | 97 | 0.688423 |
48cf85efc52b96d39ed18f6149964691786778a9 | 3,257 | py | Python | src/olympia/amo/cron.py | dante381/addons-server | 9702860a19ecca1cb4e4998f37bc43c1b2dd3aa7 | [
"BSD-3-Clause"
] | null | null | null | src/olympia/amo/cron.py | dante381/addons-server | 9702860a19ecca1cb4e4998f37bc43c1b2dd3aa7 | [
"BSD-3-Clause"
] | null | null | null | src/olympia/amo/cron.py | dante381/addons-server | 9702860a19ecca1cb4e4998f37bc43c1b2dd3aa7 | [
"BSD-3-Clause"
] | null | null | null | from datetime import datetime, timedelta
from django.core.files.storage import default_storage as storage
import olympia.core.logger
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.addons.models import Addon
from olympia.addons.tasks import delete_addons
from olympia.amo.utils import chunked
from olympia.files.models import FileUpload
from olympia.scanners.models import ScannerResult
from olympia.amo.models import FakeEmail
from . import tasks
from .sitemap import (
get_sitemap_path,
get_sitemaps,
get_sitemap_section_pages,
render_index_xml,
)
log = olympia.core.logger.getLogger('z.cron')


def gc(test_result=True):
    """Site-wide garbage collections.

    Deletes, in order:
      * ``ActivityLog`` rows older than 90 days whose action is not in
        ``amo.LOG_KEEP`` (queued asynchronously in chunks of 100);
      * add-ons older than 15 days that have no versions (hard-deleted via a
        task, no notification e-mail);
      * ``FileUpload`` rows older than 15 days, together with their files on
        storage;
      * ``ScannerResult`` rows attached to neither an upload nor a version;
      * ``FakeEmail`` rows older than 90 days.

    ``test_result`` is accepted but unused in this function body.
    """
    # NOTE(review): ``days_ago`` is not among the imports visible in this
    # excerpt -- presumably imported at module level elsewhere; confirm.
    log.info('Collecting data to delete')
    logs = (
        ActivityLog.objects.filter(created__lt=days_ago(90))
        .exclude(action__in=amo.LOG_KEEP)
        .values_list('id', flat=True)
    )
    # Deletion happens in async tasks, chunked so one huge backlog cannot
    # overwhelm a single task.
    for chunk in chunked(logs, 100):
        tasks.delete_logs.delay(chunk)
    two_weeks_ago = days_ago(15)
    # Hard-delete stale add-ons with no versions. No email should be sent.
    versionless_addons = Addon.unfiltered.filter(
        versions__pk=None, created__lte=two_weeks_ago
    ).values_list('pk', flat=True)
    for chunk in chunked(versionless_addons, 100):
        delete_addons.delay(chunk, with_deleted=True)
    # Delete stale FileUploads.
    stale_uploads = FileUpload.objects.filter(created__lte=two_weeks_ago).order_by('id')
    for file_upload in stale_uploads:
        log.info(
            '[FileUpload:{uuid}] Removing file: {path}'.format(
                uuid=file_upload.uuid, path=file_upload.path
            )
        )
        if file_upload.path:
            try:
                storage.delete(file_upload.path)
            except OSError:
                # The stored file may already be gone; deleting the DB row
                # below is what matters.
                pass
        file_upload.delete()
    # Delete stale ScannerResults.
    ScannerResult.objects.filter(upload=None, version=None).delete()
    # Delete fake emails older than 90 days
    FakeEmail.objects.filter(created__lte=days_ago(90)).delete()
| 34.648936 | 88 | 0.684986 |
48d0551fc7668ef91b0cbb625288bc4330046f92 | 642 | py | Python | day8/test_day8.py | bwbeach/advent-of-code-2020 | 572810c3adae5815543efde17a4bca9596d05a5b | [
"CC0-1.0"
] | null | null | null | day8/test_day8.py | bwbeach/advent-of-code-2020 | 572810c3adae5815543efde17a4bca9596d05a5b | [
"CC0-1.0"
] | null | null | null | day8/test_day8.py | bwbeach/advent-of-code-2020 | 572810c3adae5815543efde17a4bca9596d05a5b | [
"CC0-1.0"
] | null | null | null | from day8.day8 import fix_code, parse_code, run
# Sample program whose name indicates the jmp/acc instructions form a loop
# (the sequence revisits instruction offsets instead of running off the end).
SAMPLE_CODE_LOOP = """nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6
"""

# Same program except the eighth instruction is `nop -4` instead of `jmp -4`,
# so execution falls through to the end and halts.
SAMPLE_CODE_HALT = """nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
nop -4
acc +6
"""
| 15.285714 | 88 | 0.638629 |
48d23528c08e020ee5f13c45ec80e61813e3bd41 | 6,128 | py | Python | biosys/apps/main/tests/api/test_misc.py | florianm/biosys | 934d06ed805b0734f3cb9a00feec6cd81a94e512 | [
"Apache-2.0"
] | 2 | 2018-04-09T04:02:30.000Z | 2019-08-20T03:12:55.000Z | biosys/apps/main/tests/api/test_misc.py | florianm/biosys | 934d06ed805b0734f3cb9a00feec6cd81a94e512 | [
"Apache-2.0"
] | 29 | 2016-01-20T08:14:15.000Z | 2017-07-13T07:17:32.000Z | biosys/apps/main/tests/api/test_misc.py | florianm/biosys | 934d06ed805b0734f3cb9a00feec6cd81a94e512 | [
"Apache-2.0"
] | 5 | 2016-01-14T23:02:36.000Z | 2016-09-21T05:35:03.000Z | from django.shortcuts import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from main.models import Project
from main.tests import factories
from main.tests.api import helpers
| 31.587629 | 101 | 0.590078 |
48d29ebbfa1dba9c5ef7d472e7d45e6999e1c63b | 531 | py | Python | src/netwrok/analytics.py | simonwittber/netwrok-server | d4767faa766e7ecb0de0c912f0c0a26b45b84189 | [
"MIT"
] | 16 | 2015-12-01T14:42:30.000Z | 2021-04-26T21:16:45.000Z | src/netwrok/analytics.py | DifferentMethods/netwrok-server | d4767faa766e7ecb0de0c912f0c0a26b45b84189 | [
"MIT"
] | null | null | null | src/netwrok/analytics.py | DifferentMethods/netwrok-server | d4767faa766e7ecb0de0c912f0c0a26b45b84189 | [
"MIT"
] | 4 | 2015-03-02T07:19:15.000Z | 2015-10-14T07:38:02.000Z | import asyncio
import aiopg
from . import nwdb
from . import core
| 27.947368 | 76 | 0.653484 |
48d3bd9308acb8eb9e29472526d5d05261bbdb90 | 635 | py | Python | monte_carlo/helpers/muaanalytical.py | nathhje/bachelorproject | 4bca826d1e065f647e2088b1fd028b1bdf863124 | [
"MIT"
] | null | null | null | monte_carlo/helpers/muaanalytical.py | nathhje/bachelorproject | 4bca826d1e065f647e2088b1fd028b1bdf863124 | [
"MIT"
] | null | null | null | monte_carlo/helpers/muaanalytical.py | nathhje/bachelorproject | 4bca826d1e065f647e2088b1fd028b1bdf863124 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Determines the reflectance based on r and mua.
"""
import math
import helpers.analyticalvalues as av
def reflectance(mua, r):
    """Return the reflectance for absorption coefficient ``mua`` at radial
    distance ``r``, using the precomputed analytical quantities from
    ``helpers.analyticalvalues``.
    """
    v = av.analyticalValues(r, mua)
    # Contribution of the first effective source term (distance rho1).
    first = v.z0 * (v.ueff + v.rho1 ** -1) * math.exp(-v.ueff * v.rho1) / (v.rho1 ** 2)
    # Contribution of the second term (distance rho2), offset by 2 * zb.
    second = (v.z0 + 2 * v.zb) * (v.ueff + v.rho2 ** -1) * math.exp(-v.ueff * v.rho2) / (v.rho2 ** 2)
    # Same operation order as the original single expression, so the
    # floating-point result is bit-identical.
    return (first + second) / 4 / math.pi
| 30.238095 | 99 | 0.60315 |
48d3e34f960926be47270d979dba99f1e974b2b3 | 476 | py | Python | main/test_data.py | anna01111/demo_web_ui_test_suite | 69bedc25126b874774e2f51a83356dc9ee1b7e74 | [
"CC0-1.0"
] | null | null | null | main/test_data.py | anna01111/demo_web_ui_test_suite | 69bedc25126b874774e2f51a83356dc9ee1b7e74 | [
"CC0-1.0"
] | null | null | null | main/test_data.py | anna01111/demo_web_ui_test_suite | 69bedc25126b874774e2f51a83356dc9ee1b7e74 | [
"CC0-1.0"
] | null | null | null | from faker import Faker
"""
More info: https://microservices-demo.github.io/docs/user-accounts.html
"""
# The demo app is shipped with the following account:
username = 'user'
password = 'password'
# Fake data that is used for new registrations:
faker = Faker()
autogenerated_username = faker.user_name()
autogenerated_first_name = faker.first_name()
autogenerated_last_name = faker.last_name()
autogenerated_email = faker.email()
autogenerated_password = faker.password()
| 26.444444 | 71 | 0.779412 |
48d3f8d217b00f2ba74165ed887ea259202fee75 | 1,115 | py | Python | pfr/run.py | AnnaMag/pdf-flask-react | de89eb13b2e2e0d4418c28041fe294205f528b96 | [
"BSD-2-Clause"
] | 2 | 2019-01-04T16:55:05.000Z | 2019-08-28T20:16:47.000Z | pfr/run.py | AnnaMag/pdf-flask-react | de89eb13b2e2e0d4418c28041fe294205f528b96 | [
"BSD-2-Clause"
] | 2 | 2021-06-01T21:52:21.000Z | 2021-12-13T19:43:43.000Z | pfr/run.py | AnnaMag/pdf-flask-react | de89eb13b2e2e0d4418c28041fe294205f528b96 | [
"BSD-2-Clause"
] | null | null | null | from io import StringIO
from io import BytesIO
import urllib
from urllib import request

import utils
from pdf_processing import scrape_gazette_names, get_info_outline
from data_parsing import save_to_dict

if __name__ == '__main__':
    # Entry point: scrape the National Regulation Gazette listing, download
    # and parse a selection of the PDFs, and store the extracted records via
    # the utils database helpers.
    # not saving anything locally, just the names listed on the webpage to access the files later
    url = 'http://www.gpwonline.co.za/Gazettes/Pages/Published-National-Regulation-Gazettes.aspx'
    doc_names = scrape_gazette_names(url)
    db_name = 'gov_docs'
    db_collection = 'nat_reg'
    collection = utils.set_collection(db_name, db_collection)
    # NOTE(review): only the entries at indices 3 and 4 of the first listing
    # are processed ([3:5]) -- presumably a temporary debugging restriction;
    # confirm before widening the slice.
    for url in doc_names[0][3:5]:
        print(url)
        # Download the whole PDF into memory so the parser gets a seekable stream.
        fp = BytesIO(urllib.request.urlopen(url).read())
        info, device, pages_skipped = get_info_outline(fp)
        print(info)
        # pages_skipped should be pages for extraction - for now it is used to monitor problems
        gaz_dict = save_to_dict(device.interesting_text, device.aux_text, \
            pages_skipped, info, device.page_number, url)
        print(gaz_dict)
        utils.write_db(collection, gaz_dict)
| 33.787879 | 97 | 0.699552 |
48d4f15c7fa28d9ec9d8b63f2ea935ca7b5152ba | 1,246 | py | Python | day9/day9.py | jaredledvina/adventofcode2020 | 2a31fd88c0b6bddd2c06327d04e6630b8fb29909 | [
"MIT"
] | 1 | 2020-12-09T14:50:49.000Z | 2020-12-09T14:50:49.000Z | day9/day9.py | jaredledvina/adventofcode2020 | 2a31fd88c0b6bddd2c06327d04e6630b8fb29909 | [
"MIT"
] | null | null | null | day9/day9.py | jaredledvina/adventofcode2020 | 2a31fd88c0b6bddd2c06327d04e6630b8fb29909 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import itertools
if __name__ == '__main__':
main() | 29.666667 | 140 | 0.629213 |
48d562ad3234975315fbded1821628c64f55b9d7 | 4,461 | py | Python | streamlitfront/tests/common.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
] | null | null | null | streamlitfront/tests/common.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
] | 1 | 2022-02-03T15:21:57.000Z | 2022-02-05T00:51:33.000Z | streamlitfront/tests/common.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
] | null | null | null | from contextlib import contextmanager
from functools import partial
from inspect import Parameter
from random import choice, randint, uniform
import string
from typing import Any
from i2 import Sig
from numbers import Number
from sys import platform
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import Chrome, ChromeOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.service import Service
from webdriver_manager.chrome import ChromeDriverManager
from strand import run_process
from streamlitfront.run_app import run_app
from time import sleep
import dill
import pickle
STREAMLIT_APP_URL = 'http://localhost:8501'
def give_a_chance_to_render_element(func):
"""
Gives a chance to the application to render the element by trying up to three times
with 1 second of interval to find it before raising an error.
"""
# @wrap(func)
return wrapper
| 33.795455 | 87 | 0.68998 |
48d584fffe50d5a164a634c7bdeab43a85e1c776 | 16,094 | py | Python | Python_files/analyse.py | Deniz-shelby/goodreads_webscrap | 80be6eb85f8a128eeeef2f845726557852991463 | [
"Apache-2.0"
] | null | null | null | Python_files/analyse.py | Deniz-shelby/goodreads_webscrap | 80be6eb85f8a128eeeef2f845726557852991463 | [
"Apache-2.0"
] | 1 | 2021-04-14T07:41:49.000Z | 2021-04-14T10:02:45.000Z | Python_files/analyse.py | Deniz-shelby/goodreads_webscrap | 80be6eb85f8a128eeeef2f845726557852991463 | [
"Apache-2.0"
] | 2 | 2021-04-14T05:31:24.000Z | 2021-04-19T08:00:40.000Z |
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib import rcParams
from sklearn.preprocessing import MinMaxScaler
import warnings
import scipy.stats as st
import statsmodels as sm
| 36.494331 | 122 | 0.626942 |
48d7564242b45a65bf822b185e1203ecbd2093a0 | 773 | py | Python | 3 assignment/number_of_digits_unitest.py | nastae/programavimas_python | 7e65ad834c5f52e146fb5fcd0408b344545dc30e | [
"Apache-2.0"
] | null | null | null | 3 assignment/number_of_digits_unitest.py | nastae/programavimas_python | 7e65ad834c5f52e146fb5fcd0408b344545dc30e | [
"Apache-2.0"
] | null | null | null | 3 assignment/number_of_digits_unitest.py | nastae/programavimas_python | 7e65ad834c5f52e146fb5fcd0408b344545dc30e | [
"Apache-2.0"
] | null | null | null | import unittest
# Paraykite funkcijai X unittest'us
if __name__ == '__main__':
unittest.main()
| 24.935484 | 48 | 0.65718 |
48d79b6a3679e4354a437a7315a9dd9bd23f2c50 | 3,971 | py | Python | scraper/edx.py | thanasis457/Mooc-platform | 5ff3b7b43fadc86ec5d4d54db6963449a6610bb5 | [
"MIT"
] | 4 | 2020-08-30T12:18:27.000Z | 2021-05-19T06:42:13.000Z | scraper/edx.py | thanasis457/Mooc-platform | 5ff3b7b43fadc86ec5d4d54db6963449a6610bb5 | [
"MIT"
] | 1 | 2021-01-28T20:21:48.000Z | 2021-01-28T20:21:48.000Z | scraper/edx.py | thanasis457/Mooc-platform | 5ff3b7b43fadc86ec5d4d54db6963449a6610bb5 | [
"MIT"
] | 1 | 2020-09-14T13:20:05.000Z | 2020-09-14T13:20:05.000Z | import requests, json, bs4, urllib.parse, math
from . import Course, Platform
# Maps subject UUIDs to human-readable subject names.
# NOTE(review): these identifiers are presumably the subject UUIDs used by the
# edX catalogue/search API -- confirm against the scraper's request code.
subject_uuids = {'d8244ef2-45fb-4be3-a9d7-a6749cee3b19': 'Architecture',
                 '2cc66121-0c07-407b-96c4-99305359a36f': 'Art & Culture',
                 '9d5b5edb-254a-4d54-b430-776f1f00eaf0': 'Biology & Life Sciences',
                 '409d43f7-ff36-4834-9c28-252132347d87': 'Business & Management',
                 'c5ec1f86-4e59-4273-8e22-ceec2b8d10a2': 'Chemistry',
                 '605bb663-a342-4cf3-b5a5-fee2f33f1642': 'Communication',
                 'e52e2134-a4e4-4fcb-805f-cbef40812580': 'Computer Science',
                 'a168a80a-4b6c-4d92-9f1d-4c235206feaf': 'Data Analysis & Statistics',
                 '34173fb0-fe3d-4715-b4e0-02a9426a873c': 'Design',
                 'bab458d9-19b3-476e-864f-8abd1d1aab44': 'Economics & Finance',
                 '8ac7a3da-a60b-4565-b361-384baaa49279': 'Education & Teacher Training',
                 '337dfb23-571e-49d7-9c8e-385120dea6f3': 'Electronics',
                 '07406bfc-76c4-46cc-a5bf-2deace7995a6': 'Energy & Earth Sciences',
                 '0d7bb9ed-4492-419a-bb44-415adafd9406': 'Engineering',
                 '8aaac548-1930-4614-aeb4-a089dae7ae26': 'Environmental Studies',
                 '8a552a20-963e-475c-9b0d-4c5efe22d015': 'Ethics',
                 'caa4db79-f325-41ca-8e09-d5bb6e148240': 'Food & Nutrition',
                 '51a13a1c-7fc8-42a6-9e96-6636d10056e2': 'Health & Safety',
                 'c8579e1c-99f2-4a95-988c-3542909f055e': 'Histroy',  # NOTE(review): 'Histroy' looks like a typo for 'History'; value kept as-is since callers may match on it.
                 '00e5d5e0-ce45-4114-84a1-50a5be706da5': 'Humanities',
                 '32768203-e738-4627-8b04-78b0ed2b44cb': 'Language',
                 '4925b67d-01c4-4287-a8d1-a3e0066113b8': 'Law',
                 '74b6ed2a-3ba0-49be-adc9-53f7256a12e1': 'Literature',
                 'a669e004-cbc0-4b68-8882-234c12e1cce4': 'Math',
                 'a5db73b2-05b4-4284-beef-c7876ec1499b': 'Medicine',
                 'f520dcc1-f5b7-42fe-a757-8acfb1e9e79d': 'Music',
                 '830f46dc-624e-46f4-9df0-e2bc6b346956': 'Philosophy & Ethics',
                 '88eb7ca7-2296-457d-8aac-e5f7503a9333': 'Physics',
                 'f830cfeb-bb7e-46ed-859d-e2a9f136499f': 'Science',
                 'eefb009b-0a02-49e9-b1b1-249982b6ce86': 'Social Sciences'}
| 44.617978 | 88 | 0.576681 |
48d950cb515fdc01c87e2cf97d07a2e9d9b96b55 | 8,409 | py | Python | main.py | LaudateCorpus1/TotalConnect2.0_API-Arm-Disarm | 96885410defa036b37b5f6ae86b322de89c850ae | [
"MIT"
] | 1 | 2017-03-06T03:44:40.000Z | 2017-03-06T03:44:40.000Z | main.py | LaudateCorpus1/TotalConnect2.0_API-Arm-Disarm | 96885410defa036b37b5f6ae86b322de89c850ae | [
"MIT"
] | null | null | null | main.py | LaudateCorpus1/TotalConnect2.0_API-Arm-Disarm | 96885410defa036b37b5f6ae86b322de89c850ae | [
"MIT"
] | 2 | 2020-01-20T12:57:55.000Z | 2022-02-08T07:03:58.000Z | #!/usr/local/bin/python2.7
#FREEBSD 2 Minutes ARP Expires - /bin/echo "net.link.ether.inet.max_age 300" >> /etc/sysctl.conf
#Crontab -e "* * * * * /usr/local/bin/python2.7 /root/Security.py"
import subprocess
import ConfigParser
import string, os, sys, httplib
import xml.etree.ElementTree as ET
from datetime import datetime, time
now = datetime.now()
now_time = now.time()
#---- BOL FOR CONFIGURTION INI ----#
# Documentation: https://wiki.python.org/moin/ConfigParserExamples #
Config = ConfigParser.ConfigParser()
Config.read("Security.ini")
cfgfile = open("Security.ini")
state = BoolConfigSectionMap("Status")['armed']
#---- EOL FOR CONFIGURTION INI ----#
device1 = '00:00:00:00:00:00'
device2 = '00:00:00:00:00:00'
device3 = '00:00:00:00:00:00'
#---- BOL for LOG Output ---- #
Log = open('SecurityAuditlog.txt', 'w')
print >> Log, "---------",now_time,"---------"
#---- BOL API Section ----#
#---- EOL API Section ----#
# ---- BOL Program Initiation and function mapping ----#
runcheck()
# ---- EOL Program Initiation and function mapping ----#
#---- Logging ---- #
print >> Log, "- Armed",state,"-",peopleTotal,"DEVICES PRESENT","-"
Log.close()
#---- EOL for LOG Output ---- #
| 39.665094 | 275 | 0.646093 |
48d989d7c7b86f58f750e3be1818f6a34de5e9dd | 1,538 | py | Python | prm/relations/migrations/0002_activity.py | justaname94/innovathon2019 | d1a4e9b1b877ba12ab23384b9ee098fcdbf363af | [
"MIT"
] | null | null | null | prm/relations/migrations/0002_activity.py | justaname94/innovathon2019 | d1a4e9b1b877ba12ab23384b9ee098fcdbf363af | [
"MIT"
] | 4 | 2021-06-08T20:20:05.000Z | 2022-03-11T23:58:37.000Z | prm/relations/migrations/0002_activity.py | justaname94/personal_crm | d1a4e9b1b877ba12ab23384b9ee098fcdbf363af | [
"MIT"
] | null | null | null | # Generated by Django 2.2.5 on 2019-09-09 21:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
| 43.942857 | 156 | 0.617035 |
48da319d7adab06ea080a4bfe703b82db0fccf2a | 1,106 | py | Python | elliptic_moab/Mesh/MeshQL.py | padmec-reservoir/elliptic_moab | a3b15f29a621c35a8279fd163326a0895aa67f30 | [
"MIT"
] | null | null | null | elliptic_moab/Mesh/MeshQL.py | padmec-reservoir/elliptic_moab | a3b15f29a621c35a8279fd163326a0895aa67f30 | [
"MIT"
] | null | null | null | elliptic_moab/Mesh/MeshQL.py | padmec-reservoir/elliptic_moab | a3b15f29a621c35a8279fd163326a0895aa67f30 | [
"MIT"
] | null | null | null | from typing import Type
from elliptic.Kernel.Context import ContextDelegate
from .Selector import SelectorImplementation
from .Manager import ManagerImplementation
from .Computer import ComputerImplementation
| 34.5625 | 98 | 0.654611 |
48da48030860d7cf05ae6d06f45e092b1b0c01b7 | 1,229 | py | Python | tests/test_quil.py | stjordanis/quantumflow | bf965f0ca70cd69b387f9ca8407ab38da955e925 | [
"Apache-2.0"
] | 99 | 2018-12-03T20:41:39.000Z | 2022-02-21T13:56:08.000Z | tests/test_quil.py | stjordanis/quantumflow | bf965f0ca70cd69b387f9ca8407ab38da955e925 | [
"Apache-2.0"
] | 1 | 2021-06-25T15:18:31.000Z | 2021-06-25T15:18:31.000Z | tests/test_quil.py | stjordanis/quantumflow | bf965f0ca70cd69b387f9ca8407ab38da955e925 | [
"Apache-2.0"
] | 24 | 2018-12-03T20:41:41.000Z | 2022-01-03T01:11:45.000Z |
# Copyright 2016-2018, Rigetti Computing
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
import pytest
import quantumflow as qf
# Quil programs exercised by the round-trip tests in this module.
QUIL_FILES = [
    'hello_world.quil',
    'empty.quil',
    'classical_logic.quil',
    'control_flow.quil',
    'measure.quil',
    'qaoa.quil',
    'bell.quil',
    # 'include.quil',
]
# NOTE(review): with 'include.quil' commented out above, [:-1] now drops
# 'bell.quil' instead of 'include.quil' — confirm this is still intended.
RUNNABLE_QUIL_FILES = QUIL_FILES[:-1]
| 23.188679 | 77 | 0.613507 |
48daec9dcfb1b92e90a94069bc6dece79afb65a2 | 1,254 | py | Python | gitool/util.py | eikendev/gitool | 9bfa248093d4ee3caf25fde1a59f4f0fc66994af | [
"MIT"
] | 1 | 2022-03-17T06:26:20.000Z | 2022-03-17T06:26:20.000Z | gitool/util.py | eikendev/gitool | 9bfa248093d4ee3caf25fde1a59f4f0fc66994af | [
"MIT"
] | null | null | null | gitool/util.py | eikendev/gitool | 9bfa248093d4ee3caf25fde1a59f4f0fc66994af | [
"MIT"
] | null | null | null | import itertools
import logging
from git import Repo, InvalidGitRepositoryError
from .repository import Repository
# Module-level logger shared by the helpers in this module.
logger = logging.getLogger("gitool")
| 24.115385 | 68 | 0.633174 |
48dbc22d623e96499bba5ef1f32d58521697a022 | 3,571 | py | Python | taiga/projects/epics/serializers.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
] | null | null | null | taiga/projects/epics/serializers.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
] | 12 | 2019-11-25T14:08:32.000Z | 2021-06-24T10:35:51.000Z | taiga/projects/epics/serializers.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2017 Jess Espino <jespinog@gmail.com>
# Copyright (C) 2014-2017 David Barragn <bameda@dbarragan.com>
# Copyright (C) 2014-2017 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.base.api import serializers
from taiga.base.fields import Field, MethodField
from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.attachments.serializers import BasicAttachmentsInfoSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import StatusExtraInfoSerializerMixin
from taiga.projects.notifications.mixins import WatchedResourceSerializer
from taiga.projects.tagging.serializers import TaggedInProjectResourceSerializer
from taiga.projects.votes.mixins.serializers import VoteResourceSerializerMixin
| 40.123596 | 104 | 0.758051 |
48dbc9d4daecd2cf1d72d63509bbaa3a2bffe8c4 | 2,178 | py | Python | src/TMDbApi/TMTranslationUnit.py | shasha79/nectm | 600044a6fe2c3a73e0d9327bc85883831a26dcae | [
"Apache-2.0"
] | 3 | 2020-02-28T21:42:44.000Z | 2021-03-12T13:56:16.000Z | src/TMDbApi/TMTranslationUnit.py | Pangeamt/nectm | 6b84f048698f2530b9fdbb30695f2e2217c3fbfe | [
"Apache-2.0"
] | 2 | 2020-11-06T14:40:10.000Z | 2020-12-29T19:03:11.000Z | src/TMDbApi/TMTranslationUnit.py | Pangeamt/nectm | 6b84f048698f2530b9fdbb30695f2e2217c3fbfe | [
"Apache-2.0"
] | 2 | 2020-03-26T16:05:11.000Z | 2020-08-06T16:35:39.000Z | #
# Copyright (c) 2020 Pangeanic SL.
#
# This file is part of NEC TM
# (see https://github.com/shasha79/nectm).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import uuid
| 36.915254 | 138 | 0.674472 |
48dcecd475c9d9c66ff47a1b76abf99c791428f8 | 805 | py | Python | tests/test_268.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_268.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_268.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pytest
"""
Test 268. Missing Number
"""
| 23.676471 | 83 | 0.696894 |
48de82f88d77ad42fe5f179efaac8655f74f00d7 | 5,682 | py | Python | tests/db/test_connector.py | DaWeSearch/backend | 809e575ed730fce55d0e89a2fbc2031ba116f5e0 | [
"MIT"
] | 1 | 2021-02-15T01:05:22.000Z | 2021-02-15T01:05:22.000Z | tests/db/test_connector.py | DaWeSearch/backend | 809e575ed730fce55d0e89a2fbc2031ba116f5e0 | [
"MIT"
] | null | null | null | tests/db/test_connector.py | DaWeSearch/backend | 809e575ed730fce55d0e89a2fbc2031ba116f5e0 | [
"MIT"
] | null | null | null | import unittest
import os
import json
from functions.db.connector import *
from functions.db.models import *
from functions.authentication import *
# Fixture: a search consisting of two OR-combined term groups that are
# themselves joined with AND.
# NOTE(review): assumed to mirror the search schema in functions.db.models
# — confirm against the model definitions.
sample_search = {
    "search_groups": [
        {
            "search_terms": ["blockchain", "distributed ledger"],
            "match": "OR"
        },
        {
            "search_terms": ["energy", "infrastructure", "smart meter"],
            "match": "OR"
        }
    ],
    "match": "AND"
}

# Fixture: minimal external-database credentials used by the connector tests.
db_dict = {"db_name": "hallo", "api_key": "test"}

if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| 29.28866 | 80 | 0.62566 |
48deb6f756807dc27d051aa0715208fc6f52b020 | 1,513 | py | Python | tests/test_capstone.py | GrammaTech/gtirb-capstone | f46d90e9cd733c632620e5d8c921a4b9f011020a | [
"MIT"
] | 6 | 2020-04-10T15:19:30.000Z | 2021-04-13T22:54:17.000Z | tests/test_capstone.py | GrammaTech/gtirb-capstone | f46d90e9cd733c632620e5d8c921a4b9f011020a | [
"MIT"
] | null | null | null | tests/test_capstone.py | GrammaTech/gtirb-capstone | f46d90e9cd733c632620e5d8c921a4b9f011020a | [
"MIT"
] | 3 | 2020-07-10T22:52:32.000Z | 2021-02-13T19:52:22.000Z | # Copyright (C) 2020 GrammaTech, Inc.
#
# This code is licensed under the MIT license. See the LICENSE file in
# the project root for license terms.
#
# This project is sponsored by the Office of Naval Research, One Liberty
# Center, 875 N. Randolph Street, Arlington, VA 22203 under contract #
# N68335-17-C-0700. The content of the information does not necessarily
# reflect the position or policy of the Government and no official
# endorsement should be inferred.
#
import pytest
import gtirb
import gtirb_capstone
| 30.877551 | 72 | 0.68341 |
48df4ad454aad4847f1d7ce4f347d3747f7148ed | 2,552 | py | Python | python/paddle/fluid/tests/unittests/npu/test_update_loss_scaling_min_op_npu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/npu/test_update_loss_scaling_min_op_npu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/npu/test_update_loss_scaling_min_op_npu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | 1 | 2021-12-09T08:59:17.000Z | 2021-12-09T08:59:17.000Z | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import sys
import os
sys.path.append("..")
from op_test import OpTest
import paddle
import paddle.fluid as fluid
import paddle.fluid.contrib.mixed_precision.amp_nn as amp_nn
from test_update_loss_scaling_op_npu import TestUpdateLossScalingOpBad
# Run these operator tests in static-graph mode (required by OpTest-style
# operator tests).
paddle.enable_static()
# Fixed seed so randomized test inputs are reproducible.
# NOTE(review): confirm the test cases below actually consume this constant.
SEED = 2021
if __name__ == '__main__':
unittest.main()
| 32.303797 | 75 | 0.647727 |
48df99695d0c2e85858fd3010b30aa03fd644e15 | 1,031 | py | Python | Examples/WorkingWithMimeMessages/SetEmailHeaders.py | Muzammil-khan/Aspose.Email-Python-Dotnet | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | [
"MIT"
] | 5 | 2019-01-28T05:17:12.000Z | 2020-04-14T14:31:34.000Z | Examples/WorkingWithMimeMessages/SetEmailHeaders.py | Muzammil-khan/Aspose.Email-Python-Dotnet | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | [
"MIT"
] | 1 | 2019-01-28T16:07:26.000Z | 2021-11-25T10:59:52.000Z | Examples/WorkingWithMimeMessages/SetEmailHeaders.py | Muzammil-khan/Aspose.Email-Python-Dotnet | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | [
"MIT"
] | 6 | 2018-07-16T14:57:34.000Z | 2020-08-30T05:59:52.000Z | import aspose.email as ae
import datetime
# Script entry point.
# NOTE(review): run() is not defined in this fragment — it is presumably
# defined earlier in the full module; confirm before relying on this guard.
if __name__ == '__main__':
    run()
| 33.258065 | 81 | 0.681862 |
48e060479c6f9450fb40ff919e56deed4c5f57d9 | 7,527 | py | Python | intrinsic/classify.py | seenu-andi-rajendran/plagcomps | 98e82cfb871f73bbd8f4ab1452c2b27a95beee83 | [
"MIT"
] | 2 | 2015-01-18T06:20:27.000Z | 2021-03-19T21:19:16.000Z | intrinsic/classify.py | NoahCarnahan/plagcomps | 98e82cfb871f73bbd8f4ab1452c2b27a95beee83 | [
"MIT"
] | null | null | null | intrinsic/classify.py | NoahCarnahan/plagcomps | 98e82cfb871f73bbd8f4ab1452c2b27a95beee83 | [
"MIT"
] | 2 | 2015-11-19T12:52:14.000Z | 2016-11-11T17:00:50.000Z | # classify.py
# Alternative methods to clustering
import sys, os
from random import shuffle
import cPickle
from collections import Counter
sys.path.append('../pybrain/') # add the pybrain module to the path... TODO: actually install it.
from plagcomps.shared.util import IntrinsicUtility
from ..dbconstants import username
from ..dbconstants import password
from ..dbconstants import dbname
'''
from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection, TanhLayer
from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.structure.modules import BiasUnit
'''
import scipy
import sklearn
import sklearn.metrics
import matplotlib
import matplotlib.pyplot as pyplot
from pylab import ion, ioff, figure, draw, contourf, clf, show, hold, plot
from scipy import diag, arange, meshgrid, where
from numpy.random import multivariate_normal
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# SQLAlchemy declarative base for any ORM models declared in this module.
Base = declarative_base()
# an Engine, which the Session will use for connection resources
url = "postgresql://%s:%s@%s" % (username, password, dbname)
engine = sqlalchemy.create_engine(url)
# create tables if they don't already exist
Base.metadata.create_all(engine)
# create a configured "Session" class
Session = sessionmaker(bind=engine)

if __name__ == '__main__':
    # Open a database session and train a neural network on the
    # stylometric feature set below.
    session = Session()
    # Stylometric features used as the network's inputs.
    features = ['average_sentence_length',
                'average_syllables_per_word',
                'avg_external_word_freq_class',
                'avg_internal_word_freq_class',
                'flesch_kincaid_grade',
                'flesch_reading_ease',
                'num_chars',
                'punctuation_percentage',
                'stopword_percentage',
                'syntactic_complexity',
                'syntactic_complexity_average']
    # Training hyper-parameters.
    # NOTE(review): num_hidden_layer_nodes is never passed to the trainer
    # below — confirm whether it should be.
    num_hidden_layer_nodes = 20
    num_files = 30
    epochs = 400
    # Destination for the serialized (pybrain XML) network.
    filepath = os.path.join(os.path.dirname(__file__), "neural_networks/nn.xml")
    # NOTE(review): NeuralNetworkConfidencesClassifier is defined elsewhere
    # in this module (not visible in this fragment).
    NN = NeuralNetworkConfidencesClassifier()
    NN.construct_and_train_nn(features, num_files, epochs, filepath, session)
| 37.635 | 119 | 0.659891 |
48e0a28c89b1ce15b99aa2daf6b83acba8204f1b | 4,316 | py | Python | matplotlib-3.4.3/matplotlib-3.4.3/examples/images_contours_and_fields/image_transparency_blend.py | JohnLauFoo/clc_packages_Yu | 259f01d9b5c02154ce258734d519ae8995cd0991 | [
"MIT"
] | 1 | 2021-11-13T17:21:44.000Z | 2021-11-13T17:21:44.000Z | matplotlib-3.4.3/matplotlib-3.4.3/examples/images_contours_and_fields/image_transparency_blend.py | JohnLauFoo/clc_packages_Yu | 259f01d9b5c02154ce258734d519ae8995cd0991 | [
"MIT"
] | null | null | null | matplotlib-3.4.3/matplotlib-3.4.3/examples/images_contours_and_fields/image_transparency_blend.py | JohnLauFoo/clc_packages_Yu | 259f01d9b5c02154ce258734d519ae8995cd0991 | [
"MIT"
] | null | null | null | """
==========================================
Blend transparency with color in 2D images
==========================================
Blend transparency with color to highlight parts of data with imshow.
A common use for `matplotlib.pyplot.imshow` is to plot a 2D statistical
map. The function makes it easy to visualize a 2D matrix as an image and add
transparency to the output. For example, one can plot a statistic (such as a
t-statistic) and color the transparency of each pixel according to its p-value.
This example demonstrates how you can achieve this effect.
First we will generate some data, in this case, we'll create two 2D "blobs"
in a 2D grid. One blob will be positive, and the other negative.
"""
# sphinx_gallery_thumbnail_number = 3
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize
# Generate the space in which the blobs will live
xmin, xmax, ymin, ymax = (0, 100, 0, 100)
n_bins = 100
xx = np.linspace(xmin, xmax, n_bins)
yy = np.linspace(ymin, ymax, n_bins)
# Generate the blobs. The range of the values is roughly -.0002 to .0002
means_high = [20, 50]
means_low = [50, 60]
var = [150, 200]
gauss_x_high = normal_pdf(xx, means_high[0], var[0])
gauss_y_high = normal_pdf(yy, means_high[1], var[0])
gauss_x_low = normal_pdf(xx, means_low[0], var[1])
gauss_y_low = normal_pdf(yy, means_low[1], var[1])
weights = (np.outer(gauss_y_high, gauss_x_high)
- np.outer(gauss_y_low, gauss_x_low))
# We'll also create a grey background into which the pixels will fade
greys = np.full((*weights.shape, 3), 70, dtype=np.uint8)
# First we'll plot these blobs using ``imshow`` without transparency.
vmax = np.abs(weights).max()
imshow_kwargs = {
'vmax': vmax,
'vmin': -vmax,
'cmap': 'RdYlBu',
'extent': (xmin, xmax, ymin, ymax),
}
fig, ax = plt.subplots()
ax.imshow(greys)
ax.imshow(weights, **imshow_kwargs)
ax.set_axis_off()
###############################################################################
# Blending in transparency
# ========================
#
# The simplest way to include transparency when plotting data with
# `matplotlib.pyplot.imshow` is to pass an array matching the shape of
# the data to the ``alpha`` argument. For example, we'll create a gradient
# moving from left to right below.
# Create an alpha channel of linearly increasing values moving to the right.
alphas = np.ones(weights.shape)
alphas[:, 30:] = np.linspace(1, 0, 70)
# Create the figure and image
# Note that the absolute values may be slightly different
fig, ax = plt.subplots()
ax.imshow(greys)
ax.imshow(weights, alpha=alphas, **imshow_kwargs)
ax.set_axis_off()
###############################################################################
# Using transparency to highlight values with high amplitude
# ==========================================================
#
# Finally, we'll recreate the same plot, but this time we'll use transparency
# to highlight the extreme values in the data. This is often used to highlight
# data points with smaller p-values. We'll also add in contour lines to
# highlight the image values.
# Create an alpha channel based on weight values
# Any value whose absolute value is > .0001 will have zero transparency
alphas = Normalize(0, .3, clip=True)(np.abs(weights))
alphas = np.clip(alphas, .4, 1) # alpha value clipped at the bottom at .4
# Create the figure and image
# Note that the absolute values may be slightly different
fig, ax = plt.subplots()
ax.imshow(greys)
ax.imshow(weights, alpha=alphas, **imshow_kwargs)
# Add contour lines to further highlight different levels.
ax.contour(weights[::-1], levels=[-.1, .1], colors='k', linestyles='-')
ax.set_axis_off()
plt.show()
ax.contour(weights[::-1], levels=[-.0001, .0001], colors='k', linestyles='-')
ax.set_axis_off()
plt.show()
#############################################################################
#
# .. admonition:: References
#
# The use of the following functions, methods, classes and modules is shown
# in this example:
#
# - `matplotlib.axes.Axes.imshow` / `matplotlib.pyplot.imshow`
# - `matplotlib.axes.Axes.contour` / `matplotlib.pyplot.contour`
# - `matplotlib.colors.Normalize`
# - `matplotlib.axes.Axes.set_axis_off`
| 34.528 | 79 | 0.657322 |
48e3db6b6aba7110ea8f3e0d1c747e61649abf82 | 634 | py | Python | tests/test_admin.py | FernandoCelmer/django-global-permissions | 1ece2b18476a514dec7b1e13a51191943acb460b | [
"MIT"
] | 30 | 2015-02-04T12:26:35.000Z | 2022-03-23T21:19:10.000Z | tests/test_admin.py | FernandoCelmer/django-global-permissions | 1ece2b18476a514dec7b1e13a51191943acb460b | [
"MIT"
] | 15 | 2015-11-27T17:42:02.000Z | 2022-03-23T00:34:10.000Z | tests/test_admin.py | FernandoCelmer/django-global-permissions | 1ece2b18476a514dec7b1e13a51191943acb460b | [
"MIT"
] | 15 | 2015-04-14T18:09:26.000Z | 2022-03-22T11:42:04.000Z | from django.test import TestCase
from django.core.urlresolvers import reverse
# Resolve the user model in a version-tolerant way: prefer
# get_user_model(), which honours a custom AUTH_USER_MODEL; fall back to
# the concrete User model when get_user_model is unavailable
# (presumably older Django versions — confirm the minimum version supported).
try:
    from django.contrib.auth import get_user_model
    User = get_user_model()
except ImportError:
    from django.contrib.auth.models import User
| 33.368421 | 95 | 0.749211 |
48e429fb8eb61c10b1ad429f9b2db275e7f48ee3 | 2,307 | py | Python | Models/utils.py | weslai/ecg_classification | 61cb45849485129cf04ee97f458fdf731353fd4b | [
"MIT"
] | 1 | 2020-12-03T13:34:04.000Z | 2020-12-03T13:34:04.000Z | Models/utils.py | weslai/ecg_classification | 61cb45849485129cf04ee97f458fdf731353fd4b | [
"MIT"
] | null | null | null | Models/utils.py | weslai/ecg_classification | 61cb45849485129cf04ee97f458fdf731353fd4b | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import itertools
import numpy as np
from sklearn.metrics import confusion_matrix
## be used to evaluate the model
## put the return to the plot_confusion_matrix
## confusion matrix
def plot_confusion_matrix(cm, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.

    :param cm: square confusion matrix (e.g. from sklearn.metrics.confusion_matrix)
    :param classes: sequence of class labels used for both axes
    :param normalize: if True, divide each row by its sum (per true class)
    :param title: figure title
    :param cmap: matplotlib colormap for the image
    """
    np.set_printoptions(precision=2)
    if normalize:
        # Row-normalize so each true-class row sums to 1.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')

    plt.figure(figsize=(10, 10))
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_positions = np.arange(len(classes))
    plt.xticks(tick_positions, classes, rotation=45)
    plt.yticks(tick_positions, classes)

    # Integer cells for raw counts, two decimals when normalized.
    cell_fmt = '.2f' if normalize else 'd'
    # Cells darker than half the maximum get white text for contrast.
    cutoff = cm.max() / 2.
    n_rows, n_cols = cm.shape
    for row in range(n_rows):
        for col in range(n_cols):
            plt.text(col, row, format(cm[row, col], cell_fmt),
                     horizontalalignment="center",
                     color="white" if cm[row, col] > cutoff else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()
| 32.041667 | 91 | 0.640225 |
48e43797c38281b4f9c9d4f57ea6a962850d4cc0 | 44,206 | py | Python | evapotranspiration/penman_monteith_daily.py | JRoehrig/evapotranspiration | aeec040273e15f93bb25ff850b33a90a41c65291 | [
"MIT"
] | 2 | 2021-08-07T10:38:41.000Z | 2022-03-02T07:34:11.000Z | evapotranspiration/penman_monteith_daily.py | JRoehrig/evapotranspiration | aeec040273e15f93bb25ff850b33a90a41c65291 | [
"MIT"
] | null | null | null | evapotranspiration/penman_monteith_daily.py | JRoehrig/evapotranspiration | aeec040273e15f93bb25ff850b33a90a41c65291 | [
"MIT"
] | null | null | null | import math
import numpy as np
import pandas as pd
def bulk_surface_resistance(self):
    r"""Return the (bulk) surface resistance :math:`r_s` *[s/m]*.

    Implements FAO 56 eq. 5 (p. 21):

    .. math::

        r_s = \frac{ r_l } { LAI_{active} }

    where :math:`r_l` is the bulk stomatal resistance of the
    well-illuminated leaf *[s/m]* and the active (sunlit) leaf area index
    is :math:`LAI_{active} = lai\_active\_factor \cdot LAI`
    *[m² (leaf area) / m² (soil surface)]*.

    :return: (*float*) (bulk) surface resistance :math:`r_s` *[s/m]*
    """
    # Active (sunlit) leaf area index [m^2 (leaf area) / m^2 (soil surface)].
    sunlit_leaf_area_index = self.lai_active_factor * self.lai
    return self.rl / sunlit_leaf_area_index
def et0(self, **kwargs):
    r"""Returns potential evapotranspiration (:math:`ETo`) *[mm/day]* as described in
    `FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_. Reference
    (grass) potencial evapotranspiration is returned for default constructor values. If values in `**kwargs` are
    arrays, their lengths must be the same.

    :Keyword Arguments:

        * **date** (*str, datetime.date, datetime.datetime, pandas.TimeStamp, or np.array*)
        * **doy** (*int or np.array*) - day of the year (:math:`J`) *[-]*. Range: :math:`1 \leq J \leq 366`.
          It is not used if date is given
        * **u2** (*float or np.array*) - wind speed at 2 meters above ground surface *[m/s]*
        * **uz** (*float or np.array*) - measured wind speed at :math:`z` meters above ground surface *[m/s]*
        * **z** (*float or np.array*) - height of measurement above ground surface *[m]*
        * **t_mean** (*float or np.array*) - daily mean air temperature *[°C]*
        * **t_min** (*float or np.array*) - daily minimum air temperature *[°C]*
        * **t_max** (*float or np.array*) - daily maximum air temperature *[°C]*
        * **rh_mean** (*float or np.array*) - daily mean relative humidity *[%]*
        * **rh_min** (*float or np.array*) - daily minimum relative humidity *[%]*
        * **rh_max** (*float or np.array*) - daily maximum relative humidity *[%]*
        * **rs** (*float or np.array*) - solar or shortwave radiation *[MJ/m²day]*
        * **n** (*float or np.array*) - daily actual duration of sunshine or cloudless hours *[hour]*
        * **g** (*float or np.array*) - soil heat flux density *[MJ/m²day]*. If not given, *g* defined in
          :meth:`PenmanMonteithDaily` will be used
        * **a_s** (*float or np.array*) - see :meth:`shortwave_radiation`. Default :math:`a_s = 0.25`
        * **b_s** (*float or np.array*) - see :meth:`shortwave_radiation`. Default :math:`b_s = 0.50`
        * **negative_rnl** (*bool*) - allow negative net longwave radiation. Default :math:`negative\_rnl=True`
        * **negative_et0** (*bool*) - allow negative reference evapotranspiration. Default :math:`negative\_et0=True`

    :return: (*float or np.array*) potential evapotranspiration (:math:`ETo`) *[mm/day]*

    Cases:

        * If date and doy are given, :math:`doy` is disregarded
        * if :math:`uz` is given, :math:`z` must also be given
        * if :math:`u2` and (:math:`uz`, :math:`z`) are given, both :math:`uz` and :math:`z` are disregarded
        * if :math:`rs` and :math:`n` are given, :math:`n` will be disregarded
        * The best options for air temperature are, in this order: 1) t_min, t_max, and t_mean, 2) t_min, t_max, and
          3) tmean
        * The best options for relative air humidity are, in this order: 1) rh_max and rh_min, 2) rh_max, and 3)
          rh_mean

    Example 1::

        >>> from evapotranspiration.penman_monteith_daily import PenmanMonteithDaily
        >>> pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
        >>> et0 = pm.et0(doy=187, u2=2.078, t_min=12.3, t_max=21.5, rh_min=63, rh_max=84, n=9.25)
        >>> print(et0)
        3.872968723753793

    Example 2::

        >>> from evapotranspiration.penman_monteith_daily import PenmanMonteithDaily
        >>> pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
        >>> et0 = pm.et0(date='2001-07-06', u2=2.078, t_min=12.3, t_max=21.5, rh_min=63, rh_max=84, n=9.25)
        >>> print(et0)
        3.872968723753793

    Example 3::

        >>> from evapotranspiration.penman_monteith_daily import PenmanMonteithDaily
        >>> pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
        >>> date=np.array(['2001-07-06', '2001-07-06'])
        >>> u2=np.array([2.078, 2.078])
        >>> t_min=np.array([12.3, 12.3])
        >>> t_max=np.array([21.5, 21.5])
        >>> rh_min=np.array([63, 63])
        >>> rh_max=np.array([84, 84])
        >>> n=np.array([9.25, 9.25])
        >>> et0 = pm.et0(date=date, u2=u2, t_min=t_min, t_max=t_max, rh_min=rh_min, rh_max=rh_max, n=n)
        >>> print(et0)
        [3.87296872 3.87296872]
    """
    # Clear results of any previous call before computing a new one.
    self.reset()
    # Wind speed: use u2 directly, otherwise derive it from (uz, z).
    try:
        self.u2 = kwargs.get('u2', None)
        if self.u2 is None:
            self.u2 = self.to_u2(kwargs['uz'], kwargs['z'])
    except KeyError:
        # NOTE(review): "Penmam" is a typo in this user-facing message —
        # left untouched here as it is runtime behaviour.
        raise KeyError('Penmam-Monteith: Either u2 or both uz and z must be given')
    # Temperature: fall back to t_mean when t_min / t_max are missing.
    t_min = kwargs.get('t_min', None)
    if t_min is None:
        t_min = kwargs['t_mean']
    t_max = kwargs.get('t_max', None)
    if t_max is None:
        t_max = kwargs['t_mean']
    t_mean = kwargs.get('t_mean', None)
    # Humidity: prefer (rh_max, rh_min); fall back to rh_mean for both.
    rh_min = kwargs.get('rh_min', None)
    rh_max = kwargs.get('rh_max', None)
    if rh_max is not None:
        if rh_min is None:
            rh_min = rh_max
    else:
        rh_min = rh_max = kwargs['rh_mean']
    # Day of year: taken from 'doy', or derived from 'date'.
    self.doy = kwargs.get('doy', None)
    if self.doy is None:
        self.doy = pd.to_datetime(kwargs['date']).dayofyear
    self.rs = kwargs.get('rs', None)
    n = kwargs.get('n', None)
    g = kwargs.get('g', None)
    if g is None:
        g = self.g_default
    # Angstrom coefficients for shortwave radiation (FAO 56 eq. 35).
    a_s = kwargs.get('a_s', 0.25)
    b_s = kwargs.get('b_s', 0.50)
    if t_mean is None:
        t_mean = (t_min + t_max) / 2.0
    self.ld = PenmanMonteithDaily.latent_heat_of_vaporization(t_mean)
    # In FAO 56, where delta occurs in the numerator and denominator, the slope
    # of the vapour pressure curve is calculated using mean air temperature (Equation 9)
    self.s = PenmanMonteithDaily.slope_of_saturation_vapour_pressure_curve(t_mean)
    self.pc = PenmanMonteithDaily.psychrometric_constant(self.p, lamda=self.ld)
    self.es = PenmanMonteithDaily.saturation_vapour_pressure(t_min, t_max)
    self.ea = PenmanMonteithDaily.actual_vapour_pressure(rh_min=rh_min, rh_max=rh_max, t_min=t_min, t_max=t_max)
    # Vectorized path (doy is array-like); the TypeError fallback handles
    # scalar doy, where indexing with a comprehension fails.
    try:
        self.ra = np.array([self.ra_366[i] for i in self.doy])
        self.rs0 = np.array([self.rs0_366[i] for i in self.doy])
        if self.rs is None:
            self.mn = np.array([self.daylight_hours_366[i] for i in self.doy])
            self.rs = self.shortwave_radiation(self.ra, n, self.mn, a_s, b_s)
        # FAO56 eq. 39. The Rs/Rso term in equation 39 must be limited so that Rs/Rso <= 1.0.
        self.rs = np.where(self.rs > self.rs0, self.rs0, self.rs)
    except TypeError:
        self.ra = self.ra_366[self.doy]
        self.rs0 = self.rs0_366[self.doy]
        if self.rs is None:
            self.mn = self.daylight_hours_366[self.doy]
            self.rs = self.shortwave_radiation(self.ra, n, self.mn, a_s, b_s)
        # FAO56 eq. 39. The Rs/Rso term in equation 39 must be limited so that Rs/Rso <= 1.0.
        self.rs = self.rs0 if self.rs > self.rs0 else self.rs
    self.rns = self.net_shortwave_radiation(self.rs, self.albedo)
    self.rnl = self.net_longwave_radiation(t_min, t_max, self.rs, self.rs0, self.ea)
    # NOTE(review): the docstring advertises negative_rnl default True
    # ("allow negative"), but the code defaults the flag to False and
    # *zeroes* Rnl when the flag is set — name and behaviour look
    # inverted; confirm against the published package.
    if kwargs.get('negative_rnl', False) and self.rnl < 0.0:
        self.rnl = 0.0
    self.rn = self.rns - self.rnl
    # denominator of FAO 56 eq. 3
    etd = self.ld * (self.s + self.pc * (1 + self.f2 * self.u2))
    # ETo energy component of FAO 56 eq. 3
    self.etr = self.s * (self.rn - g) / etd
    # ETo wind component of FAO 56 eq. 3
    self.etw = (self.ld * self.pc * self.u2 * self.f1 * (self.es - self.ea) / (t_mean + 273.0)) / etd
    # Reference evapotranspiration
    self.et = self.etr + self.etw
    # Negative ET0 is clamped to zero unconditionally here, which makes the
    # documented 'negative_et0' switch moot — see NOTE(review) below.
    self.et = np.where(self.et < 0.0, 0.0, self.et)
    try:
        # Collapse a single-element result to a plain float; genuine
        # arrays raise TypeError and are left untouched.
        self.et = float(self.et)
    except TypeError:
        pass
    # NOTE(review): this re-checks 'negative_rnl' where 'negative_et0'
    # appears to be intended, and it is redundant after the unconditional
    # clamp above — confirm intended semantics.
    if kwargs.get('negative_rnl', False) and self.et < 0.0:
        self.et = 0.0
    return self.et
def et0_frame(self, df, **kwargs):
    """Return the input DataFrame extended by :meth:`et0` and further
    calculation parameters.

    Input columns default to the parameter names described in :meth:`et0`
    ('doy', 'date', 'u2', 'uz', 'z', 't_mean', 't_min', 't_max',
    'rh_mean', 'rh_min', 'rh_max', 'rs', 'n', 'g'); a differently named
    column can be mapped with a keyword argument, e.g. ``t_mean='temperature'``.

    :param df: pandas DataFrame with columns corresponding to the inputs
        described in :meth:`et0`
    :type df: pandas.DataFrame

    :Keyword Arguments:

        * **show_all** (*bool*) - append all result columns if True
          (default); otherwise request individual columns by setting
          `parameter=True`, e.g. ``Rnl=True``. ``ET0`` is always appended
          unless ``ET0=False``.

    :return: (*pandas.DataFrame*) the input DataFrame with result columns appended
    """
    def input_values(name):
        # A keyword argument may remap the default column label; absent
        # columns are passed to et0() as None.
        label = kwargs.get(name, name)
        return df[label].values if label in df.columns else None

    names = ('doy', 'date', 'u2', 'uz', 'z', 't_mean', 't_min', 't_max',
             'rh_mean', 'rh_min', 'rh_max', 'rs', 'n', 'g')
    inputs = {name: input_values(name) for name in names}
    self.et0(**inputs)

    # Result columns in the order they are appended to the frame.
    outputs = (('Lambda', self.ld), ('Psy', self.pc), ('Delta', self.s),
               ('es', self.es), ('ea', self.ea), ('Rs', self.rs),
               ('Rns', self.rns), ('Rnl', self.rnl), ('ET0r', self.etr),
               ('ET0w', self.etw), ('ET0', self.et))
    if kwargs.get('show_all', True):
        if inputs['doy'] is None:
            # Day of year was derived from 'date'; expose it as well.
            df['DoY'] = self.doy
        for label, values in outputs:
            df[label] = values
    else:
        for label, values in outputs:
            # Each column is opt-in except ET0, which defaults to True.
            if kwargs.get(label, label == 'ET0'):
                df[label] = values
    return df
| 46.14405 | 120 | 0.58474 |
48e612645ef11a151beea876541ffc2a70be93e5 | 5,123 | py | Python | src/cnc-app-name/views.py | scotchoaf/cnc-skeleton | 2116bf3d61fc1ed834daeaa146f5730713300010 | [
"MIT"
] | null | null | null | src/cnc-app-name/views.py | scotchoaf/cnc-skeleton | 2116bf3d61fc1ed834daeaa146f5730713300010 | [
"MIT"
] | null | null | null | src/cnc-app-name/views.py | scotchoaf/cnc-skeleton | 2116bf3d61fc1ed834daeaa146f5730713300010 | [
"MIT"
] | 1 | 2019-04-08T14:54:12.000Z | 2019-04-08T14:54:12.000Z | # Copyright (c) 2018, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Author: $YOURNAME and $EMAIL
"""
Palo Alto Networks cnc-skeleton
This software is provided without support, warranty, or guarantee.
Use at your own risk.
"""
from django import forms
from django.contrib import messages
from django.shortcuts import HttpResponseRedirect
# Every app will need to import at least the CNCBaseFormView
from pan_cnc.views import CNCBaseFormView, ProvisionSnippetView
# All class attributes can be defined here or in the .pan-cnc.yaml
# In this case, we have defined class level attributes there. This makes it possible to
# create apps while writing no code at all. Just create a view in the .pan-cnc.yaml based on a
# CNCBaseFormView and configure the attributes as needed.
# If you want additional logic, then you subclass the CNCBaseFormView and add your logic there.
# The two main methods to override are 'generate_dynamic_form' and 'form_valid'.
#
# generate_dynamic_form gets called before the web form is created and displayed to the user
#
# form_valid is called after they submit the form
#
# Again override the ProvisionSnippetView as we are only building a workflow here.
# CNCBaseFormView will only display the form and perform a redirect after 'form_valid'
# however, ProvisionSnippetView will actually redirect to another CNC class based in the skillet type
# I.e. this is where the logic of how to interact with APIs, PAN-OS devies, render templates, etc is all done
# You usually want a child of this class to the 'last' in a chain if you need extended logic
| 44.163793 | 111 | 0.728479 |
48e75715f9ebbd7bc9cad087839a0b649f005b70 | 1,312 | py | Python | tcc_server/emulatorRPi.py | MegaNo0body/tcc | 469824a8afc1cf846793212d42f6c8c43ee4b0bf | [
"MIT"
] | 1 | 2016-09-29T22:39:31.000Z | 2016-09-29T22:39:31.000Z | tcc_server/emulatorRPi.py | MegaNo0body/tcc | 469824a8afc1cf846793212d42f6c8c43ee4b0bf | [
"MIT"
] | null | null | null | tcc_server/emulatorRPi.py | MegaNo0body/tcc | 469824a8afc1cf846793212d42f6c8c43ee4b0bf | [
"MIT"
] | null | null | null | import sys
from time import sleep
from random import randint
from urllib.request import urlopen
from urllib.parse import urlencode
if len(sys.argv) != 2:
print('Por favor, usar: ' + sys.argv[0] + ' {idSensor}')
print('Exemplo: ' + sys.argv[0] + ' 8')
else:
sensorId = sys.argv[1]
URL_SERVICO = 'http://127.0.0.1:8081/tcc/sensor/' + sensorId + '/inserir'
VARIACAO_MAXIMA = 5
valores = {
'Chuva': 80.0,
'UmidadeAr': 85.0,
'UmidadeSolo': 80.0,
'TemperaturaAr': 30.0,
'TemperaturaSolo': 25.0
}
variacao = {}
for k in valores:
valores[k] = valores[k] + randint(-3, +3) / 10
variacao[k] = 0.0
accel = {}
while True:
for k in variacao:
accel[k] = randint(-1.0, +1.0) / 10
r = randint(10, 30)
for i in range(r):
data = {}
for k in variacao:
variacao[k] = variacao[k] + accel[k]
variacao[k] = max(variacao[k], -VARIACAO_MAXIMA)
variacao[k] = min(variacao[k], +VARIACAO_MAXIMA)
data[k] = '%.2f' % (valores[k] + round(variacao[k], 2))
data = urlencode(data)
print(data)
urlopen(URL_SERVICO, data.encode('ascii'))
sleep(0.50)
| 31.238095 | 77 | 0.51753 |
48e7717d4dc4d7ba6b003ee81bea9813e26ea8e2 | 1,487 | py | Python | sayn/logging/file_logger.py | robin-173/sayn | d1cf36b92fad6a1798b57ad80abb22e8386e0e86 | [
"Apache-2.0"
] | 105 | 2020-04-23T17:04:34.000Z | 2022-03-18T15:47:52.000Z | sayn/logging/file_logger.py | robin-173/sayn | d1cf36b92fad6a1798b57ad80abb22e8386e0e86 | [
"Apache-2.0"
] | 53 | 2020-06-12T14:41:12.000Z | 2022-01-24T13:04:58.000Z | sayn/logging/file_logger.py | robin-173/sayn | d1cf36b92fad6a1798b57ad80abb22e8386e0e86 | [
"Apache-2.0"
] | 9 | 2020-04-23T16:56:23.000Z | 2021-08-16T10:54:48.000Z | from pathlib import Path
import logging
from .logger import Logger
from .log_formatter import LogFormatter
| 28.056604 | 63 | 0.543376 |
48e84fceaf520fea1c5ef759977376465d7f8dcf | 1,514 | py | Python | tests/test_docs.py | gitter-badger/pygsuite | 536766c36f653edbc7585141f1c3327f508e19da | [
"MIT"
] | null | null | null | tests/test_docs.py | gitter-badger/pygsuite | 536766c36f653edbc7585141f1c3327f508e19da | [
"MIT"
] | null | null | null | tests/test_docs.py | gitter-badger/pygsuite | 536766c36f653edbc7585141f1c3327f508e19da | [
"MIT"
] | null | null | null | from pygsuite import DefaultFonts, TextStyle, Color
from pygsuite.docs.doc_elements.paragraph import Paragraph
BRIGHT_GREEN_HEX = "#72FF33"
| 33.644444 | 90 | 0.707398 |
48e92b16767155e8dc5662502fba6db4a07dc542 | 71,657 | py | Python | neutra/vae.py | dieterichlawson/google-research | 7ca9a612aa4239533c6ed8ef98543f9780d19f2b | [
"Apache-2.0"
] | 4 | 2020-02-04T16:23:45.000Z | 2021-08-30T11:56:01.000Z | neutra/vae.py | lceustc/google-research | bf793f31022db2636f42e132198ffe8bd9631b58 | [
"Apache-2.0"
] | 10 | 2020-09-26T00:19:12.000Z | 2022-03-12T00:04:29.000Z | neutra/vae.py | lceustc/google-research | bf793f31022db2636f42e132198ffe8bd9631b58 | [
"Apache-2.0"
] | 1 | 2020-02-29T05:06:38.000Z | 2020-02-29T05:06:38.000Z | # coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
# pylint: disable=invalid-name,g-bad-import-order,missing-docstring
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import os
from absl import app
from absl import flags
from concurrent import futures
import gin
import numpy as np
from six.moves import range
from six.moves import zip
import tensorflow as tf
import tensorflow_probability as tfp
from typing import Any, Dict, List, Optional, Tuple
from neutra import utils
tfd = tfp.distributions
tfb = tfp.bijectors
FLAGS = flags.FLAGS
TRAIN_BATCH = 250
TEST_BATCH = 1000
AIS_BATCH = 50
def IndependentBernouli3D(logits):
return tfd.Independent(
tfd.Bernoulli(logits=logits), reinterpreted_batch_ndims=3)
def IndependentDiscreteLogistic3D(locations,
scales):
dist = tfd.TransformedDistribution(
distribution=tfd.Logistic(loc=locations, scale=scales),
bijector=tfb.AffineScalar(scale=255.0))
dist = tfd.QuantizedDistribution(distribution=dist, low=0., high=255.0)
dist = tfd.Independent(dist, reinterpreted_batch_ndims=3)
return ScaleHack(dist)
def IndependentDiscreteLogistic3D2(locations,
scales):
return IndependentDiscreteLogistic(locations, scales)
class FlipImageBijector(tfb.Bijector):
def __init__(self, validate_args=False, name=None):
"""Creates the `Permute` bijector.
Args:
permutation: An `int`-like vector-shaped `Tensor` representing the
permutation to apply to the rightmost dimension of the transformed
`Tensor`.
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str`, name given to ops managed by this object.
Raises:
TypeError: if `not permutation.dtype.is_integer`.
ValueError: if `permutation` does not contain exactly one of each of
`{0, 1, ..., d}`.
"""
super(FlipImageBijector, self).__init__(
forward_min_event_ndims=3,
is_constant_jacobian=True,
validate_args=validate_args,
name=name or "flip_image")
VAEOutputs = collections.namedtuple(
"VAEOutputs", "log_p_x_z, elbo, sample_means, recon_means, klqp, total_klqp, post_z, prior_z")
AISOutputs = collections.namedtuple(
"AISOutputs",
"log_p, p_accept, z_fin, recon"
)
DLGMOutputs = collections.namedtuple(
"DLGMOutputs",
"elbo, sample_means, mcmc_log_p, recon_means, p_accept, post_z, post_z_chain, q_z, xentpq"
)
def Eval(model, dataset, train_dir, eval_dir, master,
use_polyak_averaging=False, max_number_of_evaluations=None):
data_idx, images = dataset.TestBatch(TEST_BATCH)
eval_op = model.EvalOp(data_idx, images)
utils.LogAndSaveHParams()
tf.train.get_or_create_global_step()
if use_polyak_averaging:
tf.logging.info("Using polyak averaging")
ema = tf.train.ExponentialMovingAverage(decay=0.99)
saver = tf.train.Saver(ema.variables_to_restore())
else:
saver = tf.train.Saver()
scaffold = tf.train.Scaffold(saver=saver)
tf.Session.reset(master)
hooks = [
# Just for logging.
tf.contrib.training.StopAfterNEvalsHook(dataset.test_size // TEST_BATCH),
tf.contrib.training.SummaryAtEndHook(eval_dir),
tf.train.LoggingTensorHook(utils.GetLoggingOutputs(), at_end=True)
]
tf.contrib.training.evaluate_repeatedly(
train_dir,
eval_ops=eval_op,
hooks=hooks,
# LOL...
eval_interval_secs=120,
max_number_of_evaluations=max_number_of_evaluations,
master=master,
scaffold=scaffold)
def AISEvalShard(shard, master, num_workers, num_chains, dataset, use_polyak_averaging, writer, train_dir, model_fn, batch):
tf.logging.info("Thread started")
model = model_fn()
tf.logging.info("Built model")
shard_idx = tf.placeholder(tf.int64, [])
tf.logging.info("built data")
data_iterator = dataset.AISIterator(batch, shard_idx, num_workers)
images, _ = data_iterator.get_next()
tf.logging.info("Built mA")
ais_outputs = model.AIS(images, num_chains)
log_p = ais_outputs.log_p
p_accept = ais_outputs.p_accept
tf.logging.info("Built mB")
if shard == 1:
utils.LogAndSaveHParams()
summary_op = tf.summary.merge_all()
global_step = tf.train.get_or_create_global_step()
if use_polyak_averaging:
tf.logging.info("Using polyak averaging")
ema = tf.train.ExponentialMovingAverage(decay=0.99)
saver = tf.train.Saver(ema.variables_to_restore())
else:
saver = tf.train.Saver()
tf.logging.info("Built mC")
global_step_val = []
tf.logging.info("Starting shard %d, %s", shard, master)
#with tf.MonitoredSession(
# tf.train.ChiefSessionCreator(
# master=master,
# checkpoint_dir=train_dir)) as sess:
while True:
try:
tf.Session.reset(master)
with tf.Session(master) as sess:
all_log_p = np.zeros([0])
saver.restore(sess, tf.train.latest_checkpoint(train_dir))
sess.run(data_iterator.initializer, {shard_idx: shard})
try:
step_num = 0
while True:
fetch = {
"log_p": log_p,
"global_step": global_step,
"p_accept": p_accept
}
if shard == 0:
fetch["summary"] = summary_op
tf.logging.info("Shard %d step %d started.", shard, step_num)
fetch = sess.run(fetch)
tf.logging.info("Shard %d step %d done.", shard, step_num)
tf.logging.info("Shard %d log_p %.2f, p_accept: %.2f", shard,
np.mean(fetch["log_p"]),
np.mean(fetch["p_accept"]))
all_log_p = np.hstack([all_log_p, fetch["log_p"]])
if shard == 0 and step_num == 0:
global_step_val.append(fetch["global_step"])
writer.add_summary(fetch["summary"], global_step_val[0])
step_num += 1
except tf.errors.OutOfRangeError:
tf.logging.info("Shard %d done.", shard)
pass
return all_log_p
except tf.errors.AbortedError:
pass
MODEL_TO_CLASS = {"vae": VAE, "dlgm": DLGM}
if __name__ == "__main__":
flags.DEFINE_string("mnist_data_dir", "", "")
flags.DEFINE_string("fashion_mnist_data_dir", "", "")
flags.DEFINE_string("cifar10_data_dir", "", "")
flags.DEFINE_string("data_type", "mnist", "")
flags.DEFINE_enum("mode", "train", ["train", "eval", "ais_eval", "ais_eval2"], "")
flags.DEFINE_enum("model", "vae", list(MODEL_TO_CLASS.keys()), "")
flags.DEFINE_string("train_dir", "/tmp/vae/train", "")
flags.DEFINE_string("eval_dir", "/tmp/vae/eval", "")
flags.DEFINE_string("master", "", "")
flags.DEFINE_string("ais_worker_pattern", "", "")
flags.DEFINE_integer("ais_shard", 0, "")
flags.DEFINE_integer("ais_num_workers", 1, "")
flags.DEFINE_integer("ais_num_chains", 1, "")
flags.DEFINE_integer("ais_num_replicas", 1, "")
flags.DEFINE_list("ais_replicas", "", "Manual listing of replicas")
flags.DEFINE_integer("ais_batch_size", 25, "")
flags.DEFINE_float("polyak_averaging", 0.0, "")
flags.DEFINE_boolean("test_is_valid", False, "")
flags.DEFINE(utils.YAMLDictParser(), "hparams", "", "")
app.run(main)
| 35.057241 | 124 | 0.651744 |
48e948236c66512a216844a7ad0e87904606f55a | 2,034 | py | Python | flask_oauth2_login/base.py | BasicBeluga/flask-oauth2-login | 5a12ec70bcea72b2de079c072213be54f29b70b7 | [
"MIT"
] | 42 | 2015-01-13T08:51:04.000Z | 2022-01-14T04:15:31.000Z | flask_oauth2_login/base.py | BasicBeluga/flask-oauth2-login | 5a12ec70bcea72b2de079c072213be54f29b70b7 | [
"MIT"
] | 5 | 2015-04-29T19:31:11.000Z | 2020-03-28T19:37:43.000Z | flask_oauth2_login/base.py | BasicBeluga/flask-oauth2-login | 5a12ec70bcea72b2de079c072213be54f29b70b7 | [
"MIT"
] | 28 | 2015-06-16T20:30:40.000Z | 2021-04-08T15:33:10.000Z | from flask import request, session, url_for
from requests_oauthlib import OAuth2Session
| 24.214286 | 77 | 0.675025 |
48ea7b107947ea8206fa8a2bda41ca826b065a52 | 7,530 | py | Python | segmentation/utils/transforms.py | voldemortX/DST-CBC | e392313c129f6814c1a1c0f20c0abbd5505c3d7d | [
"BSD-3-Clause"
] | 103 | 2020-04-21T01:25:16.000Z | 2022-03-24T07:45:45.000Z | segmentation/utils/transforms.py | voldemortX/DST-CBC | e392313c129f6814c1a1c0f20c0abbd5505c3d7d | [
"BSD-3-Clause"
] | 13 | 2021-03-24T06:52:21.000Z | 2022-01-18T08:17:50.000Z | segmentation/utils/transforms.py | voldemortX/DST-CBC | e392313c129f6814c1a1c0f20c0abbd5505c3d7d | [
"BSD-3-Clause"
] | 12 | 2020-04-29T02:33:11.000Z | 2021-12-28T07:59:20.000Z | # Mostly copied and modified from torch/vision/references/segmentation to support unlabeled data
# Copied functions from fmassa/vision-1 to support multi-dimensional masks loaded from numpy ndarray
import numpy as np
from PIL import Image
import random
import torch
import utils.functional as F
# For 2/3 dimensional tensors only
# Pad image with zeros, yet pad target with 255 (ignore label) on bottom & right if
# given a bigger desired size (or else nothing is done at all)
# Init with a python list as the map(mainly for cityscapes's id -> train_id)
| 34.541284 | 114 | 0.6 |
48ea83dadb4e88f0d593497119582f4e6d402985 | 9,036 | py | Python | server.py | drunkHatch/CMPUT404-assignment-webserver | 37336241ae790509804569834e2063893d37db44 | [
"Apache-2.0"
] | null | null | null | server.py | drunkHatch/CMPUT404-assignment-webserver | 37336241ae790509804569834e2063893d37db44 | [
"Apache-2.0"
] | null | null | null | server.py | drunkHatch/CMPUT404-assignment-webserver | 37336241ae790509804569834e2063893d37db44 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import socketserver
import re
import socket
import datetime
import os
import mimetypes as MT
import sys
# Copyright 2013 Abram Hindle, Eddie Antonio Santos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Furthermore it is derived from the Python documentation examples thus
# some of the code is Copyright 2001-2013 Python Software
# Foundation; All Rights Reserved
#
# http://docs.python.org/2/library/socketserver.html
#
# run: python freetests.py
# try: curl -v -X GET http://127.0.0.1:8080/
# status codes could be handled
STATUS_CODE_RESPONSE = {
0: " 0 Surprise!",
200: " 200 OK",
301: " 301 Moved Permanently",
404: " 404 Not Found",
405: " 405 Method Not Allowed"
}
# methods could be handled
HTTP_REQUEST_METHODS = {
"GET": 1,
}
# some hard coded text
END_OF_LINE_RESPONSE = "\r\n"
PROTOCOL_RESPONSE = "HTTP/1.1"
DIRECTORY_TO_SERVE = "www"
# open file error here
GOODFILE = 1
ISADIRECTORY = 2
NOFILE = 3
# response generate class
# request for storing received request attributes
# argument: requested url
# return value: open file result, opened file object, local path
def openRequestedFile(client_request_url):
cru = client_request_url
if cru[-1] == r'/':
cru += "index.html"
complete_path = DIRECTORY_TO_SERVE + cru
try:
result = open(complete_path, 'rb')
content_type = cru.split(".")
return GOODFILE, result, cru
except IsADirectoryError as e:
return ISADIRECTORY, None, None
except FileNotFoundError as n:
return NOFILE, None, None
# check type and error of opened file
# SECURITY: check the permission of opened file
if __name__ == "__main__":
HOST, PORT = "localhost", 8080
socketserver.TCPServer.allow_reuse_address = True
# Create the server, binding to localhost on port 8080
server = socketserver.TCPServer((HOST, PORT), MyWebServer)
# https://stackoverflow.com/questions/15260558/python-tcpserver-address-already-in-use-but-i-close-the-server-and-i-use-allow
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
try:
server.serve_forever()
except KeyboardInterrupt: # exit if ctrl+C
sys.exit(0)
| 34.888031 | 129 | 0.623174 |
48ebc333c8d0ba26cd1d7f0f9c59510601ab4ec4 | 1,788 | py | Python | cloudkitty/rating/hash/controllers/root.py | wanghuiict/cloudkitty | 11ff713042eb0354f497f7051130630c46860735 | [
"Apache-2.0"
] | 97 | 2015-10-18T02:53:17.000Z | 2022-03-07T05:15:39.000Z | cloudkitty/rating/hash/controllers/root.py | shanafang9/cloudkitty | 911c90569ccb09ecf0d7aa11a5a707c8ebda09cf | [
"Apache-2.0"
] | 1 | 2017-11-29T15:39:27.000Z | 2017-11-29T15:39:27.000Z | cloudkitty/rating/hash/controllers/root.py | shanafang9/cloudkitty | 911c90569ccb09ecf0d7aa11a5a707c8ebda09cf | [
"Apache-2.0"
] | 54 | 2015-10-27T10:55:02.000Z | 2022-02-18T08:23:19.000Z | # -*- coding: utf-8 -*-
# Copyright 2015 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from cloudkitty import rating
from cloudkitty.rating.hash.controllers import field as field_api
from cloudkitty.rating.hash.controllers import group as group_api
from cloudkitty.rating.hash.controllers import mapping as mapping_api
from cloudkitty.rating.hash.controllers import service as service_api
from cloudkitty.rating.hash.controllers import threshold as threshold_api
from cloudkitty.rating.hash.datamodels import mapping as mapping_models
| 38.042553 | 78 | 0.758949 |
48ec30ea94720d1931e1f3786be697d0ca01359f | 8,431 | py | Python | .test/test/task2/Aufgabe1/python-lib/cuddlefish/docs/webdocs.py | sowinski/testsubtree | d09b72e6b366e8e29e038445a1fa6987b2456625 | [
"MIT"
] | null | null | null | .test/test/task2/Aufgabe1/python-lib/cuddlefish/docs/webdocs.py | sowinski/testsubtree | d09b72e6b366e8e29e038445a1fa6987b2456625 | [
"MIT"
] | null | null | null | .test/test/task2/Aufgabe1/python-lib/cuddlefish/docs/webdocs.py | sowinski/testsubtree | d09b72e6b366e8e29e038445a1fa6987b2456625 | [
"MIT"
] | null | null | null | import os, re, errno
import markdown
import cgi
from cuddlefish import packaging
from cuddlefish.docs import apirenderer
from cuddlefish._version import get_versions
INDEX_PAGE = '/doc/static-files/base.html'
BASE_URL_INSERTION_POINT = '<base '
VERSION_INSERTION_POINT = '<div id="version">'
THIRD_PARTY_PACKAGE_SUMMARIES = '<ul id="third-party-package-summaries">'
HIGH_LEVEL_PACKAGE_SUMMARIES = '<ul id="high-level-package-summaries">'
LOW_LEVEL_PACKAGE_SUMMARIES = '<ul id="low-level-package-summaries">'
CONTENT_ID = '<div id="main-content">'
TITLE_ID = '<title>'
DEFAULT_TITLE = 'Add-on SDK Documentation'
| 44.373684 | 94 | 0.632428 |
48eca2b30f95acacb8513624eb0235e73603734b | 183 | py | Python | src/c3nav/site/templatetags/route_render.py | johnjohndoe/c3nav | a17f863a3512e305595c16b0300796b6bae81241 | [
"Apache-2.0"
] | 132 | 2016-11-12T01:45:23.000Z | 2022-03-08T15:17:10.000Z | src/c3nav/site/templatetags/route_render.py | johnjohndoe/c3nav | a17f863a3512e305595c16b0300796b6bae81241 | [
"Apache-2.0"
] | 66 | 2016-09-29T09:46:19.000Z | 2022-03-11T23:26:18.000Z | src/c3nav/site/templatetags/route_render.py | johnjohndoe/c3nav | a17f863a3512e305595c16b0300796b6bae81241 | [
"Apache-2.0"
] | 42 | 2016-09-29T08:34:57.000Z | 2022-03-08T15:17:15.000Z | from django import template
register = template.Library()
| 13.071429 | 29 | 0.726776 |
48edc6b7f87e0875d85de78f96a9bd1a71a88a84 | 9,827 | py | Python | coax/experience_replay/_prioritized.py | sleepy-owl/coax | 37c3e667b81537768beb25bb59d0f05124624128 | [
"MIT"
] | null | null | null | coax/experience_replay/_prioritized.py | sleepy-owl/coax | 37c3e667b81537768beb25bb59d0f05124624128 | [
"MIT"
] | null | null | null | coax/experience_replay/_prioritized.py | sleepy-owl/coax | 37c3e667b81537768beb25bb59d0f05124624128 | [
"MIT"
] | null | null | null | # ------------------------------------------------------------------------------------------------ #
# MIT License #
# #
# Copyright (c) 2020, Microsoft Corporation #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software #
# and associated documentation files (the "Software"), to deal in the Software without #
# restriction, including without limitation the rights to use, copy, modify, merge, publish, #
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all copies or #
# substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING #
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND #
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, #
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. #
# ------------------------------------------------------------------------------------------------ #
import jax
import numpy as onp
import chex
from ..reward_tracing import TransitionBatch
from ..utils import SumTree
from ._base import BaseReplayBuffer
__all__ = (
'PrioritizedReplayBuffer',
)
def sample(self, batch_size=32):
r"""
Get a batch of transitions to be used for bootstrapped updates.
Parameters
----------
batch_size : positive int, optional
The desired batch size of the sample.
Returns
-------
transitions : TransitionBatch
A :class:`TransitionBatch <coax.reward_tracing.TransitionBatch>` object.
"""
idx = self._sumtree.sample(n=batch_size)
P = self._sumtree.values[idx] / self._sumtree.root_value # prioritized, biased propensities
W = onp.power(P * len(self), -self.beta) # inverse propensity weights (1)
W /= W.max() # for stability, ensure only down-weighting (see sec. 3.4 of arxiv:1511.05952)
transition_batch = _concatenate_leaves(self._storage[idx])
chex.assert_equal_shape([transition_batch.W, W])
transition_batch.W *= W
return transition_batch
def update(self, idx, Adv):
r"""
Update the priority weights of transitions previously added to the buffer.
Parameters
----------
idx : 1d array of ints
The identifiers of the transitions to be updated.
Adv : ndarray
The corresponding updated advantages.
"""
idx = onp.asarray(idx, dtype='int32')
Adv = onp.asarray(Adv, dtype='float32')
chex.assert_equal_shape([idx, Adv])
chex.assert_rank([idx, Adv], 1)
idx_lookup = idx % self.capacity # wrap around
new_values = onp.where(
_get_transition_batch_idx(self._storage[idx_lookup]) == idx, # only update if ids match
onp.power(onp.abs(Adv) + self.epsilon, self.alpha),
self._sumtree.values[idx_lookup])
self._sumtree.set_values(idx_lookup, new_values)
def clear(self):
r""" Clear the experience replay buffer. """
self._storage = onp.full(shape=(self.capacity,), fill_value=None, dtype='object')
self._sumtree = SumTree(capacity=self.capacity)
self._index = 0
def _concatenate_leaves(pytrees):
return jax.tree_multimap(lambda *leaves: onp.concatenate(leaves, axis=0), *pytrees)
| 38.996032 | 100 | 0.596418 |
48edd7f48e568a644eaeb1b10b708e137aa7c9cf | 433 | py | Python | src/OTLMOW/OEFModel/Classes/Wilddet.py | davidvlaminck/OTLClassPython | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | 2 | 2022-02-01T08:58:11.000Z | 2022-02-08T13:35:17.000Z | src/OTLMOW/OEFModel/Classes/Wilddet.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | null | null | null | src/OTLMOW/OEFModel/Classes/Wilddet.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | null | null | null | # coding=utf-8
from OTLMOW.OEFModel.EMObject import EMObject
# Generated with OEFClassCreator. To modify: extend, do not edit
| 28.866667 | 117 | 0.745958 |
48eeffaa35d544f23807d7f9663c5e18d1819a1f | 16,332 | py | Python | test/python/testworkflow.py | kokizzu/txtai | 1a3848bac006e9963ad2eef466405f8da644fecb | [
"Apache-2.0"
] | null | null | null | test/python/testworkflow.py | kokizzu/txtai | 1a3848bac006e9963ad2eef466405f8da644fecb | [
"Apache-2.0"
] | 47 | 2021-10-02T22:48:03.000Z | 2021-12-29T02:36:20.000Z | test/python/testworkflow.py | kokizzu/txtai | 1a3848bac006e9963ad2eef466405f8da644fecb | [
"Apache-2.0"
] | null | null | null | """
Workflow module tests
"""
import contextlib
import glob
import io
import os
import tempfile
import sys
import unittest
import numpy as np
import torch
from txtai.api import API
from txtai.embeddings import Documents, Embeddings
from txtai.pipeline import Nop, Segmentation, Summary, Translation, Textractor
from txtai.workflow import Workflow, Task, ConsoleTask, ExportTask, FileTask, ImageTask, RetrieveTask, StorageTask, WorkflowTask
# pylint: disable = C0411
from utils import Utils
# pylint: disable=R0904
| 30.873346 | 146 | 0.558903 |
48f141e3c4e406a1ed8e50060eb75658e2cb4aab | 202 | py | Python | apps/summary/urls.py | sotkonstantinidis/testcircle | 448aa2148fbc2c969e60f0b33ce112d4740a8861 | [
"Apache-2.0"
] | 3 | 2019-02-24T14:24:43.000Z | 2019-10-24T18:51:32.000Z | apps/summary/urls.py | sotkonstantinidis/testcircle | 448aa2148fbc2c969e60f0b33ce112d4740a8861 | [
"Apache-2.0"
] | 17 | 2017-03-14T10:55:56.000Z | 2022-03-11T23:20:19.000Z | apps/summary/urls.py | sotkonstantinidis/testcircle | 448aa2148fbc2c969e60f0b33ce112d4740a8861 | [
"Apache-2.0"
] | 2 | 2016-02-01T06:32:40.000Z | 2019-09-06T04:33:50.000Z | from django.conf.urls import url
from .views import SummaryPDFCreateView
urlpatterns = [
url(r'^(?P<id>[\d]+)/$',
SummaryPDFCreateView.as_view(),
name='questionnaire_summary'),
]
| 18.363636 | 39 | 0.658416 |
48f3e0cd5e4cb55eec34f20d3487909f95548f7a | 1,418 | py | Python | utipy/array/blend.py | LudvigOlsen/utipy | c287f7eed15b3591118bba49ecdfc2b2605f59a0 | [
"MIT"
] | null | null | null | utipy/array/blend.py | LudvigOlsen/utipy | c287f7eed15b3591118bba49ecdfc2b2605f59a0 | [
"MIT"
] | 1 | 2022-02-16T15:24:33.000Z | 2022-02-16T15:24:33.000Z | utipy/array/blend.py | LudvigOlsen/utipy | c287f7eed15b3591118bba49ecdfc2b2605f59a0 | [
"MIT"
] | null | null | null | """
@author: ludvigolsen
"""
from typing import Union
import numpy as np
import pandas as pd
from utipy.utils.check_instance import check_instance
from utipy.utils.convert_to_type import convert_to_type
def blend(x1: Union[list, np.ndarray, pd.Series], x2: Union[list, np.ndarray, pd.Series], amount: float = 0.5) -> Union[list, np.ndarray, pd.Series]:
"""
Blend two arrays
Parameters
----------
x1 : list, np.ndarray, pd.Series
The first array.
x2 : list, np.ndarray, pd.Series
The second array.
amount : float
Blend rate.
Percentage between 0-1
0: Keep only x1.
1: Keep only x2.
0.1: 10% x2 / 90% x1.
A value in-between 0-1 will result in integers becoming floats.
Returns
-------
list, np.ndarray, pd.Series
Blended array with type of the original (x1)
Examples
--------
Uncomment code to run.
# x1 = [1,2,3,4,5]
# x2 = [4,5,6,7,8]
# blend(x1, x2, amount = 0.5)
returns [2.5,3.5,4.5,5.5,6.5]
"""
# Get instance types (np.ndarray, list, pd.Series)
instance_type = check_instance(x1)
x1_weighted = np.multiply(x1, (1 - amount))
x2_weighted = np.multiply(x2, amount)
blended = x1_weighted + x2_weighted
# Convert to original type (np.ndarray, list, pd.Series)
return convert_to_type(blended, instance_type)
| 24.448276 | 149 | 0.608604 |
48f4b8e6c0c1a95b21e6fbc67429a32685a3063d | 126 | py | Python | output/models/ms_data/regex/hangul_compatibility_jamo_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/ms_data/regex/hangul_compatibility_jamo_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/ms_data/regex/hangul_compatibility_jamo_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.ms_data.regex.hangul_compatibility_jamo_xsd.hangul_compatibility_jamo import Doc
__all__ = [
"Doc",
]
| 21 | 99 | 0.801587 |
48f68e5109bfeba6516e554517563cbef752a170 | 519 | py | Python | ex082.py | favitoria/python123 | 99074c309b700f48ddc6aa0811a1891145281af7 | [
"MIT"
] | null | null | null | ex082.py | favitoria/python123 | 99074c309b700f48ddc6aa0811a1891145281af7 | [
"MIT"
] | null | null | null | ex082.py | favitoria/python123 | 99074c309b700f48ddc6aa0811a1891145281af7 | [
"MIT"
] | null | null | null | resposta = 'Ss'
numeros = 0
listaTODOS = []
listaPAR = []
listaIMPAR = []
while resposta != 'N':
numeros = int(input('Digite um nmero: '))
resposta = str(input('Deseja continuar [S/N]? '))
if numeros % 2 == 0:
listaPAR.append(numeros)
elif numeros % 2 == 1:
listaIMPAR.append(numeros)
listaTODOS.append(numeros)
print(f'Os valores PARES digitados foram: {listaPAR}')
print(f'Os valores IMPARES digitados foram: {listaIMPAR}')
listaTODOS.sort()
print(f'No TOTAL foram: {listaTODOS}') | 30.529412 | 58 | 0.660886 |
48f6af2a7976b7669c6376018cbf7149ae87451d | 2,218 | py | Python | CodingInterview2/29_PrintMatrix/print_matrix.py | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
] | 10 | 2020-07-06T11:00:58.000Z | 2022-01-29T09:25:24.000Z | CodingInterview2/29_PrintMatrix/print_matrix.py | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
] | null | null | null | CodingInterview2/29_PrintMatrix/print_matrix.py | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
] | 3 | 2020-07-13T06:39:23.000Z | 2020-08-15T16:29:48.000Z | """
29
"""
def print_matrix_clockwisely(matrix: list) -> list:
"""
Print the given matrix clockwesely.
Parameters
-----------
matrix: list[list]
the given matrix.
Returns
---------
out: list
the clockwise order of the matrix.
Notes
------
"""
if not matrix:
return []
if not matrix[0]:
return []
res = []
start = 0
rows, cols = len(matrix), len(matrix[0])
while rows > 2 * start and cols > 2 * start:
print_circle2(matrix, rows, cols, start, res)
start += 1
return res
if __name__ == '__main__':
m = make_matrix(1,5)
print(m)
res = print_matrix_clockwisely(m)
print(res)
| 21.533981 | 77 | 0.540126 |
48f6c64933693697a368fb1d2ae925d6fe4cb255 | 1,170 | py | Python | migrations/versions/ee5315dcf3e1_.py | wildintellect/tasking-manager | 373fb231404628e6ae9a1838539b9c3cb23ad73c | [
"BSD-2-Clause"
] | 3 | 2018-04-24T08:12:31.000Z | 2020-09-02T18:11:21.000Z | migrations/versions/ee5315dcf3e1_.py | wildintellect/tasking-manager | 373fb231404628e6ae9a1838539b9c3cb23ad73c | [
"BSD-2-Clause"
] | 28 | 2019-01-04T17:39:00.000Z | 2021-05-06T23:06:24.000Z | migrations/versions/ee5315dcf3e1_.py | wildintellect/tasking-manager | 373fb231404628e6ae9a1838539b9c3cb23ad73c | [
"BSD-2-Clause"
] | 3 | 2020-02-29T20:46:09.000Z | 2020-11-20T19:44:04.000Z | """empty message
Revision ID: ee5315dcf3e1
Revises: 9f5b73af01db
Create Date: 2017-05-24 10:39:46.586986
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ee5315dcf3e1'
down_revision = '9f5b73af01db'
branch_labels = None
depends_on = None
| 31.621622 | 87 | 0.694017 |
48f9216cd7b0c9c64c3f0cc8145822d20126a1a1 | 572 | py | Python | python/random-videogame-generator.py | iamashiq/Hacktoberfest2021-2 | 9823996e9e97a25fcf70abc6fd6c55e4b60da568 | [
"MIT"
] | 6 | 2021-10-04T07:57:24.000Z | 2021-11-15T13:35:21.000Z | python/random-videogame-generator.py | iamashiq/Hacktoberfest2021-2 | 9823996e9e97a25fcf70abc6fd6c55e4b60da568 | [
"MIT"
] | 2 | 2021-10-14T16:55:50.000Z | 2021-10-31T12:17:20.000Z | python/random-videogame-generator.py | iamashiq/Hacktoberfest2021-2 | 9823996e9e97a25fcf70abc6fd6c55e4b60da568 | [
"MIT"
] | 33 | 2021-10-03T05:00:58.000Z | 2021-11-05T19:49:19.000Z | print("Are you trying to find new videogames to play?")
print("let me help you!")
print("do you like shooting games, yes or no")
shooting=input()
if shooting = "yes"
print("do you like battle royale games?")
br=input()
if br="yes"
print("you should try out call of duty!")
else if br="no"
print("you should try overwatch!")
else if shooting="no"
print("do you like sports games, yes or no")
sports=input()
if sports="yes"
print("try out Fifa or NBA2k!")
else if sports="no"
print("I know, try out rocket league!")
| 30.105263 | 55 | 0.636364 |
48f9edbd6a5a7ba5a520ddc41c7a0b91f9666bf5 | 1,382 | py | Python | cosmic_ray/operators/unary_operator_replacement.py | rob-smallshire/cosmic-ray | 4fd751b38eee30568f8366e09452d7aa60be4e26 | [
"MIT"
] | null | null | null | cosmic_ray/operators/unary_operator_replacement.py | rob-smallshire/cosmic-ray | 4fd751b38eee30568f8366e09452d7aa60be4e26 | [
"MIT"
] | null | null | null | cosmic_ray/operators/unary_operator_replacement.py | rob-smallshire/cosmic-ray | 4fd751b38eee30568f8366e09452d7aa60be4e26 | [
"MIT"
] | null | null | null | """Implementation of the unary-operator-replacement operator.
"""
import ast
from .operator import Operator
from ..util import build_mutations
# None indicates we want to delete the operator
OPERATORS = (ast.UAdd, ast.USub, ast.Invert, ast.Not, None)
| 28.791667 | 78 | 0.607815 |
48fa5657a82772ca80f844d0c1f8bca709ceaf35 | 2,069 | py | Python | src/icolos/core/workflow_steps/calculation/rmsd.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
] | 11 | 2022-01-30T14:36:13.000Z | 2022-03-22T09:40:57.000Z | src/icolos/core/workflow_steps/calculation/rmsd.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
] | 2 | 2022-03-23T07:56:49.000Z | 2022-03-24T12:01:42.000Z | src/icolos/core/workflow_steps/calculation/rmsd.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
] | 8 | 2022-01-28T10:32:31.000Z | 2022-03-22T09:40:59.000Z | from typing import List
from pydantic import BaseModel
from icolos.core.containers.compound import Conformer, unroll_conformers
from icolos.utils.enums.step_enums import StepRMSDEnum, StepDataManipulationEnum
from icolos.core.workflow_steps.step import _LE
from icolos.core.workflow_steps.calculation.base import StepCalculationBase
_SR = StepRMSDEnum()
_SDM = StepDataManipulationEnum()
| 43.104167 | 118 | 0.669889 |
48fb1aa9e5e10603d8a878537cb85772b452f285 | 468 | py | Python | iot/iot_portal/doctype/iot_homepage/iot_homepage.py | srdgame/symlink_iot | 6ec524498cccaf2f49f7264a3b284a8956bd430c | [
"MIT"
] | 4 | 2017-09-26T09:21:19.000Z | 2021-12-22T10:26:36.000Z | iot/iot_portal/doctype/iot_homepage/iot_homepage.py | srdgame/symlink_iot | 6ec524498cccaf2f49f7264a3b284a8956bd430c | [
"MIT"
] | 1 | 2017-11-21T20:53:10.000Z | 2017-12-11T02:17:06.000Z | iot/iot_portal/doctype/iot_homepage/iot_homepage.py | srdgame/symlink_iot | 6ec524498cccaf2f49f7264a3b284a8956bd430c | [
"MIT"
] | 9 | 2017-03-17T04:12:22.000Z | 2022-03-21T09:33:11.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Dirk Chang and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.website.utils import delete_page_cache
| 31.2 | 84 | 0.782051 |
48fb52f8c130468ec6ba0fdb93a761de09a44b65 | 368 | py | Python | src/garage/envs/env_spec.py | Maltimore/garage | a3f44b37eeddca37d157766a9a72e8772f104bcd | [
"MIT"
] | 2 | 2020-03-15T14:35:15.000Z | 2021-02-15T16:38:00.000Z | src/garage/envs/env_spec.py | Maltimore/garage | a3f44b37eeddca37d157766a9a72e8772f104bcd | [
"MIT"
] | null | null | null | src/garage/envs/env_spec.py | Maltimore/garage | a3f44b37eeddca37d157766a9a72e8772f104bcd | [
"MIT"
] | 1 | 2020-02-24T03:04:23.000Z | 2020-02-24T03:04:23.000Z | """EnvSpec class."""
| 23 | 73 | 0.668478 |
48fc04ddecaf2a0349002da2c688a1f9e69caacb | 105 | py | Python | exercises/exe41 - 50/exe047.py | thomas-rohde/Classes-Python | f862995510b7aabf68bc14aecf815f597034d8a1 | [
"MIT"
] | null | null | null | exercises/exe41 - 50/exe047.py | thomas-rohde/Classes-Python | f862995510b7aabf68bc14aecf815f597034d8a1 | [
"MIT"
] | null | null | null | exercises/exe41 - 50/exe047.py | thomas-rohde/Classes-Python | f862995510b7aabf68bc14aecf815f597034d8a1 | [
"MIT"
] | null | null | null | t = int(input('Digite um n: '))
for t0 in range(1, 11):
print('{} X {} = {}'.format(t, t0, t * t0))
| 26.25 | 47 | 0.495238 |
48fe1f175aa02923066c86fda95e2c0081a49955 | 98,484 | py | Python | pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-DIAMETER-BASE-PROTOCOL-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DIAMETER-BASE-PROTOCOL-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:54:20 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
ciscoExperiment, = mibBuilder.importSymbols("CISCO-SMI", "ciscoExperiment")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Gauge32, ObjectIdentity, Unsigned32, NotificationType, iso, MibIdentifier, Counter64, Counter32, Bits, Integer32, ModuleIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ObjectIdentity", "Unsigned32", "NotificationType", "iso", "MibIdentifier", "Counter64", "Counter32", "Bits", "Integer32", "ModuleIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks")
RowStatus, StorageType, TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "StorageType", "TruthValue", "DisplayString", "TextualConvention")
ciscoDiameterBasePMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 10, 133))
ciscoDiameterBasePMIB.setRevisions(('2006-08-24 00:01',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setRevisionsDescriptions(('Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setLastUpdated('200608240001Z')
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-aaa@cisco.com')
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setDescription("The MIB module for entities implementing the Diameter Base Protocol. Initial Cisco'ized version of the IETF draft draft-zorn-dime-diameter-base-protocol-mib-00.txt.")
ciscoDiameterBasePMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 0))
ciscoDiameterBasePMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1))
ciscoDiameterBasePMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2))
cdbpLocalCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1))
cdbpLocalStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2))
cdbpPeerCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3))
cdbpPeerStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4))
cdbpRealmCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5))
cdbpRealmStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6))
cdbpTrapCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7))
ciscoDiaBaseProtEnableProtocolErrorNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableProtocolErrorNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableProtocolErrorNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtProtocolErrorNotif notification.')
ciscoDiaBaseProtProtocolErrorNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsProtocolErrors"))
if mibBuilder.loadTexts: ciscoDiaBaseProtProtocolErrorNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtProtocolErrorNotif.setDescription('An ciscoDiaBaseProtProtocolErrorNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnableProtocolErrorNotif is true(1) 2) the value of cdbpPeerStatsProtocolErrors changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnableTransientFailureNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableTransientFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableTransientFailureNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtTransientFailureNotif notification.')
ciscoDiaBaseProtTransientFailureNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 2)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransientFailures"))
if mibBuilder.loadTexts: ciscoDiaBaseProtTransientFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtTransientFailureNotif.setDescription('An ciscoDiaBaseProtTransientFailureNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnableTransientFailureNotif is true(1) 2) the value of cdbpPeerStatsTransientFailures changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePermanentFailureNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePermanentFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePermanentFailureNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPermanentFailureNotif notification.')
ciscoDiaBaseProtPermanentFailureNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 3)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsPermanentFailures"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPermanentFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPermanentFailureNotif.setDescription('An ciscoDiaBaseProtPermanentFailureNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePermanentFailureNotif is true(1) 2) the value of cdbpPeerStatsPermanentFailures changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePeerConnectionDownNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 4), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionDownNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionDownNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPeerConnectionDownNotif notification.')
ciscoDiaBaseProtPeerConnectionDownNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 4)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionDownNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionDownNotif.setDescription('An ciscoDiaBaseProtPeerConnectionDownNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePeerConnectionDownNotif is true(1) 2) cdbpPeerStatsState changes to closed(1). It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePeerConnectionUpNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 5), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionUpNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionUpNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPeerConnectionUpNotif notification.')
ciscoDiaBaseProtPeerConnectionUpNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 5)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionUpNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionUpNotif.setDescription('An ciscoDiaBaseProtPeerConnectionUpNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePeerConnectionUpNotif is true(1) 2) the value of cdbpPeerStatsState changes to either rOpen(6)or iOpen(7). It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
cdbpLocalId = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalId.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalId.setDescription("The implementation identification string for the Diameter software in use on the system, for example; 'diameterd'")
cdbpLocalIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2), )
if mibBuilder.loadTexts: cdbpLocalIpAddrTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrTable.setDescription("The table listing the Diameter local host's IP Addresses.")
cdbpLocalIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddrIndex"))
if mibBuilder.loadTexts: cdbpLocalIpAddrEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrEntry.setDescription('A row entry representing a Diameter local host IP Address.')
cdbpLocalIpAddrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalIpAddrIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrIndex.setDescription('A number uniquely identifying the number of IP Addresses supported by this Diameter host.')
cdbpLocalIpAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalIpAddrType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrType.setDescription('The type of internet address stored in cdbpLocalIpAddress.')
cdbpLocalIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 3), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalIpAddress.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddress.setDescription('The IP-Address of the host, which is of the type specified in cdbpLocalIpAddrType.')
cdbpLocalTcpListenPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalTcpListenPort.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalTcpListenPort.setDescription("This object represents Diameter TCP 'listen' port.")
cdbpLocalSctpListenPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalSctpListenPort.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalSctpListenPort.setDescription("This object represents Diameter SCTP 'listen' port.")
cdbpLocalOriginHost = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 5), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpLocalOriginHost.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalOriginHost.setDescription('This object represents the Local Origin Host.')
cdbpLocalRealm = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalRealm.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalRealm.setDescription('This object represents the Local Realm Name.')
cdbpRedundancyEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 7), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpRedundancyEnabled.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyEnabled.setDescription('This parameter indicates if cisco redundancy has been enabled, it is enabled if set to true and disabled if set to false.')
cdbpRedundancyInfraState = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("rfUnknown", 0), ("rfDisabled", 1), ("rfInitialization", 2), ("rfNegotiation", 3), ("rfStandbyCold", 4), ("rfStandbyConfig", 5), ("rfStandbyFileSys", 6), ("rfStandbyBulk", 7), ("rfStandbyHot", 8), ("rfActiveFast", 9), ("rfActiveDrain", 10), ("rfActivePreconfig", 11), ("rfActivePostconfig", 12), ("rfActive", 13), ("rfActiveExtraload", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRedundancyInfraState.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyInfraState.setDescription("This parameter indicates the current state of cisco redundancy infrastructure state. rfUnknown(0) - unknown state rfDisabled(1) - RF is not functioning at this time rfInitialization(2) - co-ordinating init with platform rfNegotiation(3) - initial negotiation with peer to determine active-standby rfStandbyCold(4) - peer is active, we're cold rfStandbyConfig(5) - sync config from active to standby rfStandbyFileSys(6) - sync file sys from active to standby rfStandbyBulk(7) - clients bulk sync from active to standby rfStandbyHot(8) - standby ready-n-able to be active rfActiveFast(9) - immediate notification of standby going active rfActiveDrain(10) - drain queued messages from peer rfActivePreconfig(11) - active and before config rfActivePostconfig(12) - active and post config rfActive(13) - actively processing new calls rfActiveExtraload(14) - actively processing new calls extra resources other Processing is failed and I have extra load.")
cdbpRedundancyLastSwitchover = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRedundancyLastSwitchover.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyLastSwitchover.setDescription('This object represents the Last Switchover Time.')
cdbpLocalApplTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10), )
if mibBuilder.loadTexts: cdbpLocalApplTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplTable.setDescription('The table listing the Diameter applications supported by this server.')
cdbpLocalApplEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplIndex"))
if mibBuilder.loadTexts: cdbpLocalApplEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplEntry.setDescription('A row entry representing a Diameter application on this server.')
cdbpLocalApplIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalApplIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplIndex.setDescription('A number uniquely identifying a supported Diameter application. Upon reload, cdbpLocalApplIndex values may be changed.')
cdbpLocalApplStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 2), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setDescription('The storage type for this conceptual row. None of the columnar objects is writable when the conceptual row is permanent.')
cdbpLocalApplRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalApplRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdsgStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpLocalApplIndex has been set. cdbpLocalApplIndex may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpLocalApplStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpLocalApplStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpLocalApplStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
cdbpLocalVendorTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11), )
if mibBuilder.loadTexts: cdbpLocalVendorTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorTable.setDescription('The table listing the vendor IDs supported by local Diameter.')
cdbpLocalVendorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorIndex"))
if mibBuilder.loadTexts: cdbpLocalVendorEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorEntry.setDescription('A row entry representing a vendor ID supported by local Diameter.')
cdbpLocalVendorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalVendorIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorIndex.setDescription('A number uniquely identifying the vendor ID supported by local Diameter. Upon reload, cdbpLocalVendorIndex values may be changed.')
cdbpLocalVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 9, 10415, 12645))).clone(namedValues=NamedValues(("diameterVendorIetf", 0), ("diameterVendorCisco", 9), ("diameterVendor3gpp", 10415), ("diameterVendorVodafone", 12645))).clone('diameterVendorIetf')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorId.setDescription('The active vendor ID used for peer connections. diameterVendorIetf(0) - Diameter vendor id ietf diameterVendorCisco(9) - Diameter vendor id cisco diameterVendor3gpp(10415) - Diameter vendor id 3gpp diameterVendorVodafone(12645) - Diameter vendor id vodafone.')
cdbpLocalVendorStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpLocalVendorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpLocalVendorRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpLocalVendorId has been set. cdbpLocalVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpLocalVendorRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpLocalVendorRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpLocalVendorRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
cdbpAppAdvToPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12), )
if mibBuilder.loadTexts: cdbpAppAdvToPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerTable.setDescription('The table listing the applications advertised by this host to each peer and the types of service supported: accounting, authentication or both.')
cdbpAppAdvToPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerVendorId"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerIndex"))
if mibBuilder.loadTexts: cdbpAppAdvToPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpAppAdvToPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvToPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerVendorId.setDescription('The IANA Enterprise Code value assigned to the vendor of the Diameter device.')
cdbpAppAdvToPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvToPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerIndex.setDescription('A number uniquely identifying the Diameter applications advertised as supported by this host to each peer. Upon reload, cdbpAppAdvToPeerIndex values may be changed.')
cdbpAppAdvToPeerServices = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpAppAdvToPeerServices.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerServices.setDescription('The type of services supported for each application, accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
cdbpAppAdvToPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpAppAdvToPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpAppAdvToPeerRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpAppAdvToPeerRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpAppAdvToPeerVendorId has been set. cdbpAppAdvToPeerVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpAppAdvToPeerRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpAppAdvToPeerRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpAppAdvToPeerRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# --- cdbpLocalStats group (OID 1.3.6.1.4.1.9.10.133.1.2) ---------------------
# Scalar statistics for the local Diameter Base Protocol entity: total
# packet counters, uptime, last-reset time, and a read-write control object
# that can reinitialize persistent server state.
# NOTE: this module is machine-generated (pysmi) from the Cisco MIB; the OID
# tuples, constraints and description strings mirror the MIB and must not be
# hand-edited.
cdbpLocalStatsTotalPacketsIn = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 1), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsIn.setDescription('The total number of packets received by Diameter Base Protocol.')
cdbpLocalStatsTotalPacketsOut = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 2), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsOut.setDescription('The total number of packets transmitted by Diameter Base Protocol.')
cdbpLocalStatsTotalUpTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalUpTime.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalUpTime.setDescription('This object represents the total time the Diameter server has been up until now.')
cdbpLocalResetTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalResetTime.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalResetTime.setDescription("If the server keeps persistent state (e.g., a process) and supports a 'reset' operation (e.g., can be told to re-read configuration files), this value will be the time elapsed (in hundredths of a second) since the server was 'reset'. For software that does not have persistence or does not support a 'reset' operation, this value will be zero.")
# Read-write enumerated control: writing reset(2) asks the agent to
# reinitialize persistent server state; reads report the server's state.
cdbpLocalConfigReset = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("reset", 2), ("initializing", 3), ("running", 4))).clone('other')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpLocalConfigReset.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalConfigReset.setDescription('Status/action object to reinitialize any persistent server state. When set to reset(2), any persistent server state (such as a process) is reinitialized as if the server had just been started. This value will never be returned by a read operation. When read, one of the following values will be returned: other(1) - server in some unknown state. reset(2) - command to reinitialize server state. initializing(3) - server (re)initializing. running(4) - server currently running.')
# --- cdbpPeerTable (OID 1.3.6.1.4.1.9.10.133.1.3.1) --------------------------
# One row per discovered or configured Diameter peer, indexed by
# cdbpPeerIndex. Read-create columns plus a RowStatus column allow managers
# to create/destroy peer entries.
cdbpPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1), )
if mibBuilder.loadTexts: cdbpPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerTable.setDescription('The table listing information regarding the discovered or configured Diameter peer servers.')
cdbpPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"))
if mibBuilder.loadTexts: cdbpPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
# Index column (no max-access set): values may be renumbered on reload.
cdbpPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIndex.setDescription('A number uniquely identifying each Diameter peer with which the host server communicates. Upon reload, cdbpPeerIndex values may be changed.')
cdbpPeerId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerId.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerId.setDescription('The server identifier for the Diameter peer. It must be unique and non-empty.')
cdbpPeerPortConnect = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerPortConnect.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerPortConnect.setDescription('The connection port this server used to connect to the Diameter peer. If there is no active connection, this value will be zero(0).')
# Default listen port 3868 is the IANA-assigned Diameter port.
cdbpPeerPortListen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(3868)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerPortListen.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerPortListen.setDescription('The port the server is listening on.')
cdbpPeerProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tcp", 1), ("sctp", 2))).clone('tcp')).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerProtocol.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerProtocol.setDescription('The transport protocol (tcp/sctp) the Diameter peer is using. tcp(1) - Transmission Control Protocol sctp(2) - Stream Control Transmission Protocol.')
cdbpPeerSecurity = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("tls", 2), ("ipsec", 3))).clone('other')).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerSecurity.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerSecurity.setDescription('The security the Diameter peer is using. other(1) - Unknown Security Protocol. tls(2) - Transport Layer Security Protocol. ipsec(3) - Internet Protocol Security.')
cdbpPeerFirmwareRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerFirmwareRevision.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerFirmwareRevision.setDescription('Firmware revision of peer. If no firmware revision, the revision of the Diameter software module may be reported instead.')
cdbpPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 8), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpPeerStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStorageType.setDescription('The storage type for this conceptual row. Only cdbpPeerPortListen object is writable when the conceptual row is permanent.')
# RowStatus column governing creation/activation/destruction of peer rows.
cdbpPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpPeerRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpPeerId has been set. cdbpPeerId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpPeerRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpPeerRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpPeerRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# --- cdbpPeerIpAddrTable (OID 1.3.6.1.4.1.9.10.133.1.3.2) --------------------
# IP addresses for each Diameter peer; rows are indexed by (cdbpPeerIndex,
# cdbpPeerIpAddressIndex), i.e. this table augments cdbpPeerTable with a
# one-to-many address list.
cdbpPeerIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2), )
if mibBuilder.loadTexts: cdbpPeerIpAddrTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddrTable.setDescription('The table listing the Diameter server IP Addresses.')
cdbpPeerIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddressIndex"))
if mibBuilder.loadTexts: cdbpPeerIpAddrEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddrEntry.setDescription('A row entry representing peer Diameter server IP Addresses.')
cdbpPeerIpAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerIpAddressIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddressIndex.setDescription('A number uniquely identifying the number of IP Addresses supported by all Diameter peers.')
# NOTE(review): the type column is InetAddressType while the address column
# below is declared as IpAddress (IPv4-only SMI type) — this mismatch comes
# from the published MIB itself, not from the code generator.
cdbpPeerIpAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerIpAddressType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddressType.setDescription('The type of address stored in diameterPeerIpAddress.')
cdbpPeerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpPeerIpAddress.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddress.setDescription('The active IP Address(es) used for connections.')
# --- cdbpAppAdvFromPeerTable (OID 1.3.6.1.4.1.9.10.133.1.3.3) ----------------
# Applications each peer advertises to this host, indexed by
# (cdbpPeerIndex, cdbpAppAdvFromPeerVendorId, cdbpAppAdvFromPeerIndex).
# The single data column records the service type (acct/auth/both).
cdbpAppAdvFromPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3), )
if mibBuilder.loadTexts: cdbpAppAdvFromPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerTable.setDescription('The table listing the applications advertised by each peer to this host and the types of service supported: accounting, authentication or both.')
cdbpAppAdvFromPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerVendorId"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerIndex"))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpAppAdvFromPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerVendorId.setDescription('The IANA Enterprise Code value assigned to the vendor of the Diameter device.')
cdbpAppAdvFromPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerIndex.setDescription('A number uniquely identifying the applications advertised as supported from each Diameter peer.')
cdbpAppAdvFromPeerType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpAppAdvFromPeerType.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerType.setDescription('The type of services supported for each application, accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
# --- cdbpPeerVendorTable (OID 1.3.6.1.4.1.9.10.133.1.3.4) --------------------
# Vendor IDs supported by each peer, indexed by (cdbpPeerIndex,
# cdbpPeerVendorIndex). Read-create columns plus RowStatus allow managers
# to add/remove vendor-ID rows per peer.
cdbpPeerVendorTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4), )
if mibBuilder.loadTexts: cdbpPeerVendorTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorTable.setDescription('The table listing the Vendor IDs supported by the peer.')
cdbpPeerVendorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorIndex"))
if mibBuilder.loadTexts: cdbpPeerVendorEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorEntry.setDescription('A row entry representing a Vendor ID supported by the peer.')
cdbpPeerVendorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerVendorIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorIndex.setDescription('A number uniquely identifying the Vendor ID supported by the peer. Upon reload, cdbpPeerVendorIndex values may be changed.')
# Enumerated vendor IDs are IANA enterprise numbers (0=IETF, 9=Cisco,
# 10415=3GPP, 12645=Vodafone); default is diameterVendorIetf(0).
cdbpPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 9, 10415, 12645))).clone(namedValues=NamedValues(("diameterVendorIetf", 0), ("diameterVendorCisco", 9), ("diameterVendor3gpp", 10415), ("diameterVendorVodafone", 12645))).clone('diameterVendorIetf')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorId.setDescription('The active vendor ID used for peer connections. diameterVendorIetf(0) - Diameter vendor id ietf diameterVendorCisco(9) - Diameter vendor id cisco diameterVendor3gpp(10415) - Diameter vendor id 3gpp diameterVendorVodafone(12645) - Diameter vendor id vodafone.')
cdbpPeerVendorStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpPeerVendorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpPeerVendorRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpPeerVendorId has been set. Also, a newly created row cannot be made active until the corresponding 'cdbpPeerIndex' has been set. cdbpPeerVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpPeerVendorRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpPeerVendorRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpPeerVendorRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# --- cdbpPeerStatsTable (OID 1.3.6.1.4.1.9.10.133.1.4.1) ---------------------
# Per-peer statistics, indexed by cdbpPeerIndex: connection-state machine
# status, last disconnect cause, watchdog status, and read-only counters for
# each Diameter Base Protocol message type (ASR/ASA, ACR/ACA, CER/CEA,
# DWR/DWA, DPR/DPA, RAR/RAA, STR/STA), plus error/retransmission counters.
cdbpPeerStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1), )
if mibBuilder.loadTexts: cdbpPeerStatsTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTable.setDescription('The table listing the Diameter peer statistics.')
cdbpPeerStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"))
if mibBuilder.loadTexts: cdbpPeerStatsEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsEntry.setDescription('A row entry representing a Diameter peer.')
# Peer State Machine state (per the Diameter base protocol's peer FSM).
cdbpPeerStatsState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("closed", 1), ("waitConnAck", 2), ("waitICEA", 3), ("elect", 4), ("waitReturns", 5), ("rOpen", 6), ("iOpen", 7), ("closing", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsState.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsState.setDescription('Connection state in the Peer State Machine of the peer with which this Diameter server is communicating. closed(1) - Connection closed with this peer. waitConnAck(2) - Waiting for an acknowledgment from this peer. waitICEA(3) - Waiting for a Capabilities-Exchange- Answer from this peer. elect(4) - When the peer and the server are both trying to bring up a connection with each other at the same time. An election process begins which determines which socket remains open. waitReturns(5) - Waiting for election returns. r-open(6) - Responder transport connection is used for communication. i-open(7) - Initiator transport connection is used for communication. closing(8) - Actively closing and doing cleanup.')
cdbpPeerStatsStateDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsStateDuration.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsStateDuration.setDescription('This object represents the Peer state duration.')
cdbpPeerStatsLastDiscCause = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("rebooting", 1), ("busy", 2), ("doNotWantToTalk", 3), ("election", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsLastDiscCause.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsLastDiscCause.setDescription("The last cause for a peers disconnection. rebooting(1) - A scheduled reboot is imminent. busy(2) - The peer's internal resources are constrained, and it has determined that the transport connection needs to be shutdown. doNotWantToTalk(3) - The peer has determined that it does not see a need for the transport connection to exist, since it does not expect any messages to be exchanged in the foreseeable future. electionLost(4) - The peer has determined that it has lost the election process and has therefore disconnected the transport connection.")
cdbpPeerStatsWhoInitDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("host", 1), ("peer", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsWhoInitDisconnect.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsWhoInitDisconnect.setDescription('Did the host or peer initiate the disconnect? host(1) - If this server initiated the disconnect. peer(2) - If the peer with which this server was connected initiated the disconnect.')
# Device-Watchdog health of the transport connection.
cdbpPeerStatsDWCurrentStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("okay", 1), ("suspect", 2), ("down", 3), ("reopen", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWCurrentStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWCurrentStatus.setDescription('This object indicates the connection status. okay(1) - Indicates the connection is presumed working. suspect(2) - Indicates the connection is possibly congested or down. down(3) - The peer is no longer reachable, causing the transport connection to be shutdown. reopen(4) - Three watchdog messages are exchanged with accepted round trip times, and the connection to the peer is considered stabilized.')
cdbpPeerStatsTimeoutConnAtmpts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 6), Counter32()).setUnits('attempts').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTimeoutConnAtmpts.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTimeoutConnAtmpts.setDescription('If there is no transport connection with a peer, this is the number of times the server attempts to connect to that peer. This is reset on disconnection.')
# Per-message-type in/out counters: Abort-Session (ASR/ASA).
cdbpPeerStatsASRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 7), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASRsIn.setDescription('Abort-Session-Request messages received from the peer.')
cdbpPeerStatsASRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 8), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASRsOut.setDescription('Abort-Session-Request messages sent to the peer.')
cdbpPeerStatsASAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 9), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASAsIn.setDescription('Number of Abort-Session-Answer messages received from the peer.')
cdbpPeerStatsASAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 10), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASAsOut.setDescription('Number of Abort-Session-Answer messages sent to the peer.')
# Accounting (ACR/ACA).
cdbpPeerStatsACRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 11), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACRsIn.setDescription('Number of Accounting-Request messages received from the peer.')
cdbpPeerStatsACRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 12), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACRsOut.setDescription('Number of Accounting-Request messages sent to the peer.')
cdbpPeerStatsACAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 13), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACAsIn.setDescription('Number of Accounting-Answer messages received from the peer.')
cdbpPeerStatsACAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 14), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACAsOut.setDescription('Number of Accounting-Answer messages sent to the peer.')
# Capabilities-Exchange (CER/CEA).
cdbpPeerStatsCERsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 15), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCERsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCERsIn.setDescription('Number of Capabilities-Exchange-Request messages received from the peer.')
cdbpPeerStatsCERsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 16), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCERsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCERsOut.setDescription('Number of Capabilities-Exchange-Request messages sent to the peer.')
cdbpPeerStatsCEAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 17), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCEAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCEAsIn.setDescription('Number of Capabilities-Exchange-Answer messages received from the peer.')
cdbpPeerStatsCEAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 18), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCEAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCEAsOut.setDescription('Number of Capabilities-Exchange-Answer messages sent to the peer.')
# Device-Watchdog (DWR/DWA).
cdbpPeerStatsDWRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 19), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWRsIn.setDescription('Number of Device-Watchdog-Request messages received from the peer.')
cdbpPeerStatsDWRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 20), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWRsOut.setDescription('Number of Device-Watchdog-Request messages sent to the peer.')
cdbpPeerStatsDWAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 21), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWAsIn.setDescription('Number of Device-Watchdog-Answer messages received from the peer.')
cdbpPeerStatsDWAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 22), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWAsOut.setDescription('Number of Device-Watchdog-Answer messages sent to the peer.')
# Disconnect-Peer (DPR/DPA).
cdbpPeerStatsDPRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 23), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPRsIn.setDescription('Number of Disconnect-Peer-Request messages received.')
cdbpPeerStatsDPRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 24), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPRsOut.setDescription('Number of Disconnect-Peer-Request messages sent.')
cdbpPeerStatsDPAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 25), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPAsIn.setDescription('Number of Disconnect-Peer-Answer messages received.')
cdbpPeerStatsDPAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 26), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPAsOut.setDescription('Number of Disconnect-Peer-Answer messages sent.')
# Re-Auth (RAR/RAA).
cdbpPeerStatsRARsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 27), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRARsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRARsIn.setDescription('Number of Re-Auth-Request messages received.')
cdbpPeerStatsRARsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 28), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRARsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRARsOut.setDescription('Number of Re-Auth-Request messages sent.')
cdbpPeerStatsRAAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 29), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRAAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRAAsIn.setDescription('Number of Re-Auth-Answer messages received.')
cdbpPeerStatsRAAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 30), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRAAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRAAsOut.setDescription('Number of Re-Auth-Answer messages sent.')
# Session-Termination (STR/STA).
cdbpPeerStatsSTRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 31), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTRsIn.setDescription('Number of Session-Termination-Request messages received from the peer.')
cdbpPeerStatsSTRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 32), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTRsOut.setDescription('Number of Session-Termination-Request messages sent to the peer.')
cdbpPeerStatsSTAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 33), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTAsIn.setDescription('Number of Session-Termination-Answer messages received from the peer.')
cdbpPeerStatsSTAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 34), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTAsOut.setDescription('Number of Session-Termination-Answer messages sent to the peer.')
# Timers, error counters and pending-request gauges.
cdbpPeerStatsDWReqTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 35), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWReqTimer.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWReqTimer.setDescription('Device-Watchdog Request Timer, which is the interval between packets sent to peers.')
cdbpPeerStatsRedirectEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRedirectEvents.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRedirectEvents.setDescription('Redirect Event count, which is the number of redirects sent from a peer.')
cdbpPeerStatsAccDupRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccDupRequests.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccDupRequests.setDescription('The number of duplicate Diameter Accounting-Request packets received.')
cdbpPeerStatsMalformedReqsts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsMalformedReqsts.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsMalformedReqsts.setDescription('The number of malformed Diameter packets received.')
cdbpPeerStatsAccsNotRecorded = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccsNotRecorded.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccsNotRecorded.setDescription('The number of Diameter Accounting-Request packets which were received and responded to but not recorded.')
cdbpPeerStatsAccRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccRetrans.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccRetrans.setDescription('The number of Diameter Accounting-Request packets retransmitted to this Diameter server.')
cdbpPeerStatsTotalRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTotalRetrans.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTotalRetrans.setDescription('The number of Diameter packets retransmitted to this Diameter server, not to include Diameter Accounting-Request packets retransmitted.')
# Gauge32 (not Counter32): value moves up and down with outstanding requests.
cdbpPeerStatsAccPendReqstsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 42), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccPendReqstsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccPendReqstsOut.setDescription('The number of Diameter Accounting-Request packets sent to this peer that have not yet timed out or received a response. This variable is incremented when an Accounting-Request is sent to this server and decremented due to receipt of an Accounting-Response, a timeout or a retransmission.')
cdbpPeerStatsAccReqstsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccReqstsDropped.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccReqstsDropped.setDescription('The number of Accounting-Requests to this server that have been dropped.')
cdbpPeerStatsHByHDropMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsHByHDropMessages.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsHByHDropMessages.setDescription('An answer message that is received with an unknown hop-by-hop identifier. Does not include accounting requests dropped.')
cdbpPeerStatsEToEDupMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsEToEDupMessages.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsEToEDupMessages.setDescription('Duplicate answer messages that are to be locally consumed. Does not include duplicate accounting requests received.')
cdbpPeerStatsUnknownTypes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsUnknownTypes.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsUnknownTypes.setDescription('The number of Diameter packets of unknown type which were received.')
cdbpPeerStatsProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsProtocolErrors.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsProtocolErrors.setDescription('This object represents the Number of protocol errors returned to peer, but not including redirects.')
cdbpPeerStatsTransientFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTransientFailures.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTransientFailures.setDescription('This object represents the transient failure count.')
cdbpPeerStatsPermanentFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsPermanentFailures.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsPermanentFailures.setDescription('This object represents the Number of permanent failures returned to peer.')
cdbpPeerStatsTransportDown = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTransportDown.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTransportDown.setDescription('This object represents the Number of unexpected transport failures.')
# --- cdbpRealmKnownPeersTable ---
# Maps each Diameter realm to its known peers.  Rows are indexed by
# (cdbpRealmMessageRouteIndex, cdbpRealmKnownPeersIndex); the peers-index column
# itself is not-accessible (no setMaxAccess), per SMI convention for index objects.
cdbpRealmKnownPeersTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1), )
if mibBuilder.loadTexts: cdbpRealmKnownPeersTable.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersTable.setDescription('The table listing the Diameter realms and known peers.')
cdbpRealmKnownPeersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeersIndex"))
if mibBuilder.loadTexts: cdbpRealmKnownPeersEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersEntry.setDescription('A row entry representing a Diameter realm and known peers.')
cdbpRealmKnownPeersIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpRealmKnownPeersIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersIndex.setDescription('A number uniquely identifying a peer known to this realm. Upon reload, cdbpRealmKnownPeersIndex values may be changed.')
# Cross-reference into cdbpPeerTable (same value space as cdbpPeerIndex).
cdbpRealmKnownPeers = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmKnownPeers.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeers.setDescription('The index of the peer this realm knows about. This is an ordered list, where the ordering signifies the order in which the peers are tried. Same as the cdbpPeerIndex')
cdbpRealmKnownPeersChosen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("roundRobin", 1), ("loadBalance", 2), ("firstPreferred", 3), ("mostRecentFirst", 4), ("other", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmKnownPeersChosen.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersChosen.setDescription('How the realm chooses which peer to send packets to. roundRobin(1) - The peer used for each transaction is selected based on the order in which peers are configured. loadBalance(2) - The peer used for each transaction is based on the load metric (maybe implementation dependent) of all peers defined for the realm, with the least loaded server selected first. firstPreferred(3) - The first defined server is always used for transactions unless failover occurs. mostRecentFirst(4) - The most recently used server is used first for each transaction.')
# --- cdbpRealmMessageRouteTable: table/row/index + configuration columns ---
# Realm-based message routing: one row per realm, indexed by
# cdbpRealmMessageRouteIndex.  Config columns identify the realm name,
# application id, service type, and routing action; counter columns follow below.
cdbpRealmMessageRouteTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1), )
if mibBuilder.loadTexts: cdbpRealmMessageRouteTable.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteTable.setDescription('The table listing the Diameter realm-based message route information.')
cdbpRealmMessageRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteIndex"))
if mibBuilder.loadTexts: cdbpRealmMessageRouteEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteEntry.setDescription('A row entry representing a Diameter realm based message route server.')
# Index column: not-accessible (no setMaxAccess call), per SMI convention.
cdbpRealmMessageRouteIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpRealmMessageRouteIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteIndex.setDescription('A number uniquely identifying each realm.')
cdbpRealmMessageRouteRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 2), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRealm.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRealm.setDescription('This object represents the realm name')
cdbpRealmMessageRouteApp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteApp.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteApp.setDescription('Application id used to route packets to this realm.')
# Enumerated columns: service type (acct/auth/both) and routing action
# (local/relay/proxy/redirect), encoded as constrained Integer32 with named values.
cdbpRealmMessageRouteType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteType.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteType.setDescription('The types of service supported for each realm application: accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
cdbpRealmMessageRouteAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("local", 1), ("relay", 2), ("proxy", 3), ("redirect", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteAction.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteAction.setDescription('The action is used to identify how a message should be treated based on the realm, application and type. local(1) - Diameter messages that resolve to a route entry with the Local Action set to Local can be satisfied locally, and do not need to be routed to another server. relay(2) - All Diameter messages that fall within this category MUST be routed to a next-hop server, without modifying any non-routing AVPs. proxy(3) - All Diameter messages that fall within this category MUST be routed to a next-hop server. redirect(4) - Diameter messages that fall within this category MUST have the identity of the home Diameter server(s) appended, and returned to the sender of the message.')
# --- cdbpRealmMessageRouteTable: per-realm message counters ---
# Read-only Counter32 columns (units 'messages') counting each Diameter request/
# answer type in and out of the realm: ACR/ACA (accounting), RAR/RAA (re-auth),
# STR/STA (session termination), ASR/ASA (abort session); followed by
# retransmission/duplicate/pending/drop statistics.
cdbpRealmMessageRouteACRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 6), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsIn.setDescription('Number of Accounting-Request messages received from the realm.')
cdbpRealmMessageRouteACRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 7), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsOut.setDescription('Number of Accounting-Request messages sent to the realm.')
cdbpRealmMessageRouteACAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 8), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsIn.setDescription('Number of Accounting-Answer messages received from the realm.')
cdbpRealmMessageRouteACAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 9), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsOut.setDescription('Number of Accounting-Answer messages sent to the realm.')
cdbpRealmMessageRouteRARsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 10), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsIn.setDescription('Number of Re-Auth-Request messages received from the realm.')
cdbpRealmMessageRouteRARsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 11), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsOut.setDescription('Number of Re-Auth-Request messages sent to the realm.')
cdbpRealmMessageRouteRAAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 12), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsIn.setDescription('Number of Re-Auth-Answer messages received from the realm.')
cdbpRealmMessageRouteRAAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 13), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsOut.setDescription('Number of Re-Auth-Answer messages sent to the realm.')
cdbpRealmMessageRouteSTRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 14), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsIn.setDescription('Number of Session-Termination-Request messages received from the realm.')
cdbpRealmMessageRouteSTRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 15), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsOut.setDescription('Number of Session-Termination-Request messages sent to the realm.')
cdbpRealmMessageRouteSTAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 16), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsIn.setDescription('Number of Session-Termination-Answer messages received from the realm.')
cdbpRealmMessageRouteSTAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 17), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsOut.setDescription('Number of Session-Termination-Answer messages sent to the realm.')
cdbpRealmMessageRouteASRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 18), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsIn.setDescription('Number of Abort-Session-Request messages received from the realm.')
cdbpRealmMessageRouteASRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 19), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsOut.setDescription('Number of Abort-Session-Request messages sent to the realm.')
cdbpRealmMessageRouteASAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 20), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsIn.setDescription('Number of Abort-Session-Answer messages received from the realm.')
cdbpRealmMessageRouteASAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 21), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsOut.setDescription('Number of Abort-Session-Answer messages sent to the realm.')
# Retransmission / duplicate / pending / dropped statistics (no units set here,
# matching the upstream MIB definition).
cdbpRealmMessageRouteAccRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccRetrans.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccRetrans.setDescription('The number of Diameter accounting packets retransmitted to this realm.')
cdbpRealmMessageRouteAccDupReqsts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccDupReqsts.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccDupReqsts.setDescription('The number of duplicate Diameter accounting packets sent to this realm.')
# Gauge32: pending-request level rises and falls rather than monotonically counting.
cdbpRealmMessageRoutePendReqstsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 24), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRoutePendReqstsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRoutePendReqstsOut.setDescription('The number of Diameter Accounting-Request packets sent to this peer that have not yet timed out or received a response. This variable is incremented when an Accounting-Request is sent to this server and decremented due to receipt of an Accounting-Response, a timeout or a retransmission.')
cdbpRealmMessageRouteReqstsDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteReqstsDrop.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteReqstsDrop.setDescription('The number of requests dropped by this realm.')
# --- Conformance section: compliance statement ---
# Subtree anchors for compliances (…133.2.1) and groups (…133.2.2), then the
# single ModuleCompliance object listing every mandatory/skipped group.
ciscoDiameterBasePMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 1))
ciscoDiameterBasePMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2))
ciscoDiameterBasePMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 1, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerStatsGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBNotificationsGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBTrapCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalStatsSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerStatsSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBRealmCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBRealmStatsSkippedGroup"))
# pysnmp >= 4.4.1 requires rebinding via setStatus(); older releases mutate in place.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBCompliance = ciscoDiameterBasePMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBCompliance.setDescription('The compliance statement for Diameter Base Protocol entities.')
# --- Conformance section: object and notification groups ---
# Each ObjectGroup/NotificationGroup enumerates the MIB objects it covers by
# (module-name, symbol-name) pairs.  The repeated version check rebinds the
# group through setStatus() on pysnmp >= 4.4.1, where setStatus returns a new
# object instead of mutating in place.
ciscoDiameterBasePMIBLocalCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalRealm"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyEnabled"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyInfraState"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyLastSwitchover"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalOriginHost"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBLocalCfgGroup = ciscoDiameterBasePMIBLocalCfgGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalCfgGroup.setDescription('A collection of objects providing configuration common to the server.')
ciscoDiameterBasePMIBPeerCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 2)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerPortConnect"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerPortListen"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerProtocol"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerSecurity"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerFirmwareRevision"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerRowStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddressType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddress"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBPeerCfgGroup = ciscoDiameterBasePMIBPeerCfgGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerCfgGroup.setDescription('A collection of objects providing configuration of the Diameter peers.')
# Peer-statistics group: one setObjects() call whose argument list is split
# across two physical lines in the generated output.
ciscoDiameterBasePMIBPeerStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 3)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsState"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsStateDuration"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsLastDiscCause"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsWhoInitDisconnect"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWCurrentStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTimeoutConnAtmpts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCERsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCERsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCEAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCEAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRARsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRARsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRAAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRAAsOut"), 
("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWReqTimer"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRedirectEvents"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccDupRequests"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsMalformedReqsts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccsNotRecorded"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTotalRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccPendReqstsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccReqstsDropped"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsHByHDropMessages"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsEToEDupMessages"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsUnknownTypes"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsProtocolErrors"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransientFailures"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsPermanentFailures"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransportDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBPeerStatsGroup = ciscoDiameterBasePMIBPeerStatsGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerStatsGroup.setDescription('A collection of objects providing statistics of the Diameter peers.')
ciscoDiameterBasePMIBNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 4)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtProtocolErrorNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtTransientFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPermanentFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPeerConnectionDownNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPeerConnectionUpNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBNotificationsGroup = ciscoDiameterBasePMIBNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBNotificationsGroup.setDescription('The set of notifications which an agent is required to implement.')
ciscoDiameterBasePMIBTrapCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 5)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnableProtocolErrorNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnableTransientFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePermanentFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePeerConnectionDownNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePeerConnectionUpNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBTrapCfgGroup = ciscoDiameterBasePMIBTrapCfgGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBTrapCfgGroup.setDescription('A collection of objects providing configuration for base protocol notifications.')
# "Skipped" groups: objects grouped separately by the MIB author (optional or
# deferred compliance items per the upstream MIB module).
ciscoDiameterBasePMIBLocalCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 6)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddrType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddress"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalTcpListenPort"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalSctpListenPort"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalUpTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalResetTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalConfigReset"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplRowStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerServices"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBLocalCfgSkippedGroup = ciscoDiameterBasePMIBLocalCfgSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalCfgSkippedGroup.setDescription('A collection of objects providing configuration common to the server.')
ciscoDiameterBasePMIBLocalStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 7)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalUpTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalResetTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalConfigReset"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBLocalStatsSkippedGroup = ciscoDiameterBasePMIBLocalStatsSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalStatsSkippedGroup.setDescription('A collection of objects providing statistics common to the server.')
ciscoDiameterBasePMIBPeerCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 8)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBPeerCfgSkippedGroup = ciscoDiameterBasePMIBPeerCfgSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerCfgSkippedGroup.setDescription('A collection of objects providing configuration for Diameter peers.')
ciscoDiameterBasePMIBPeerStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 9)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWCurrentStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWReqTimer"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRedirectEvents"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccDupRequests"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsEToEDupMessages"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBPeerStatsSkippedGroup = ciscoDiameterBasePMIBPeerStatsSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerStatsSkippedGroup.setDescription('A collection of objects providing statistics of Diameter peers.')
ciscoDiameterBasePMIBRealmCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 10)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeers"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeersChosen"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBRealmCfgSkippedGroup = ciscoDiameterBasePMIBRealmCfgSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBRealmCfgSkippedGroup.setDescription('A collection of objects providing configuration for realm message routing.')
ciscoDiameterBasePMIBRealmStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 11)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRealm"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteApp"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAction"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRARsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRARsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRAAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRAAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAccRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAccDupReqsts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRoutePendReqstsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteReqstsDrop"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDiameterBasePMIBRealmStatsSkippedGroup = ciscoDiameterBasePMIBRealmStatsSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBRealmStatsSkippedGroup.setDescription('A collection of objects providing statistics of realm message routing.')
mibBuilder.exportSymbols("CISCO-DIAMETER-BASE-PROTOCOL-MIB", cdbpRealmMessageRouteACRsIn=cdbpRealmMessageRouteACRsIn, cdbpRealmStats=cdbpRealmStats, ciscoDiameterBasePMIBCompliance=ciscoDiameterBasePMIBCompliance, cdbpPeerStatsSTAsOut=cdbpPeerStatsSTAsOut, cdbpPeerProtocol=cdbpPeerProtocol, cdbpPeerTable=cdbpPeerTable, ciscoDiaBaseProtPeerConnectionDownNotif=ciscoDiaBaseProtPeerConnectionDownNotif, cdbpLocalVendorIndex=cdbpLocalVendorIndex, cdbpPeerStatsDWReqTimer=cdbpPeerStatsDWReqTimer, cdbpPeerStatsACAsIn=cdbpPeerStatsACAsIn, cdbpPeerStatsDWRsOut=cdbpPeerStatsDWRsOut, ciscoDiaBaseProtEnablePeerConnectionDownNotif=ciscoDiaBaseProtEnablePeerConnectionDownNotif, cdbpPeerStatsDPAsIn=cdbpPeerStatsDPAsIn, cdbpPeerId=cdbpPeerId, cdbpAppAdvFromPeerTable=cdbpAppAdvFromPeerTable, cdbpRealmMessageRouteSTRsIn=cdbpRealmMessageRouteSTRsIn, cdbpRealmMessageRouteApp=cdbpRealmMessageRouteApp, cdbpLocalVendorEntry=cdbpLocalVendorEntry, cdbpRealmMessageRouteAccDupReqsts=cdbpRealmMessageRouteAccDupReqsts, cdbpAppAdvToPeerVendorId=cdbpAppAdvToPeerVendorId, cdbpLocalIpAddrType=cdbpLocalIpAddrType, cdbpPeerSecurity=cdbpPeerSecurity, ciscoDiaBaseProtTransientFailureNotif=ciscoDiaBaseProtTransientFailureNotif, cdbpPeerStatsAccPendReqstsOut=cdbpPeerStatsAccPendReqstsOut, ciscoDiameterBasePMIBLocalCfgGroup=ciscoDiameterBasePMIBLocalCfgGroup, cdbpRealmMessageRouteRealm=cdbpRealmMessageRouteRealm, cdbpPeerEntry=cdbpPeerEntry, cdbpRedundancyLastSwitchover=cdbpRedundancyLastSwitchover, cdbpRealmMessageRouteAction=cdbpRealmMessageRouteAction, cdbpPeerIpAddrTable=cdbpPeerIpAddrTable, cdbpPeerStatsSTAsIn=cdbpPeerStatsSTAsIn, cdbpRealmCfgs=cdbpRealmCfgs, cdbpPeerStatsTransientFailures=cdbpPeerStatsTransientFailures, cdbpRealmKnownPeersIndex=cdbpRealmKnownPeersIndex, cdbpLocalVendorTable=cdbpLocalVendorTable, cdbpPeerStorageType=cdbpPeerStorageType, cdbpAppAdvFromPeerVendorId=cdbpAppAdvFromPeerVendorId, cdbpPeerStatsRAAsOut=cdbpPeerStatsRAAsOut, cdbpLocalId=cdbpLocalId, 
ciscoDiameterBasePMIBNotifs=ciscoDiameterBasePMIBNotifs, ciscoDiameterBasePMIBGroups=ciscoDiameterBasePMIBGroups, cdbpPeerStats=cdbpPeerStats, cdbpRealmMessageRouteASRsOut=cdbpRealmMessageRouteASRsOut, cdbpRealmMessageRouteAccRetrans=cdbpRealmMessageRouteAccRetrans, cdbpAppAdvToPeerServices=cdbpAppAdvToPeerServices, cdbpPeerStatsACRsOut=cdbpPeerStatsACRsOut, cdbpRedundancyEnabled=cdbpRedundancyEnabled, cdbpPeerVendorRowStatus=cdbpPeerVendorRowStatus, cdbpPeerStatsUnknownTypes=cdbpPeerStatsUnknownTypes, ciscoDiameterBasePMIBCompliances=ciscoDiameterBasePMIBCompliances, cdbpPeerStatsEToEDupMessages=cdbpPeerStatsEToEDupMessages, cdbpPeerVendorEntry=cdbpPeerVendorEntry, ciscoDiaBaseProtEnableProtocolErrorNotif=ciscoDiaBaseProtEnableProtocolErrorNotif, cdbpPeerStatsTable=cdbpPeerStatsTable, cdbpPeerIpAddrEntry=cdbpPeerIpAddrEntry, ciscoDiameterBasePMIBConform=ciscoDiameterBasePMIBConform, cdbpPeerStatsSTRsOut=cdbpPeerStatsSTRsOut, cdbpRealmMessageRouteIndex=cdbpRealmMessageRouteIndex, cdbpAppAdvToPeerIndex=cdbpAppAdvToPeerIndex, ciscoDiameterBasePMIBPeerStatsGroup=ciscoDiameterBasePMIBPeerStatsGroup, ciscoDiaBaseProtEnablePeerConnectionUpNotif=ciscoDiaBaseProtEnablePeerConnectionUpNotif, cdbpLocalApplRowStatus=cdbpLocalApplRowStatus, ciscoDiaBaseProtEnablePermanentFailureNotif=ciscoDiaBaseProtEnablePermanentFailureNotif, ciscoDiameterBasePMIBPeerStatsSkippedGroup=ciscoDiameterBasePMIBPeerStatsSkippedGroup, PYSNMP_MODULE_ID=ciscoDiameterBasePMIB, ciscoDiameterBasePMIBObjects=ciscoDiameterBasePMIBObjects, cdbpLocalRealm=cdbpLocalRealm, cdbpLocalVendorId=cdbpLocalVendorId, cdbpLocalResetTime=cdbpLocalResetTime, ciscoDiameterBasePMIBRealmCfgSkippedGroup=ciscoDiameterBasePMIBRealmCfgSkippedGroup, cdbpPeerStatsDPRsIn=cdbpPeerStatsDPRsIn, cdbpPeerStatsEntry=cdbpPeerStatsEntry, cdbpPeerStatsAccDupRequests=cdbpPeerStatsAccDupRequests, cdbpRealmMessageRoutePendReqstsOut=cdbpRealmMessageRoutePendReqstsOut, cdbpTrapCfgs=cdbpTrapCfgs, 
ciscoDiameterBasePMIBTrapCfgGroup=ciscoDiameterBasePMIBTrapCfgGroup, cdbpAppAdvFromPeerType=cdbpAppAdvFromPeerType, cdbpPeerIndex=cdbpPeerIndex, cdbpPeerVendorId=cdbpPeerVendorId, cdbpAppAdvToPeerRowStatus=cdbpAppAdvToPeerRowStatus, cdbpLocalStatsTotalPacketsOut=cdbpLocalStatsTotalPacketsOut, cdbpPeerStatsHByHDropMessages=cdbpPeerStatsHByHDropMessages, cdbpRealmMessageRouteASAsIn=cdbpRealmMessageRouteASAsIn, cdbpLocalStats=cdbpLocalStats, cdbpPeerStatsRedirectEvents=cdbpPeerStatsRedirectEvents, cdbpPeerStatsASRsOut=cdbpPeerStatsASRsOut, cdbpPeerStatsTotalRetrans=cdbpPeerStatsTotalRetrans, cdbpRealmMessageRouteEntry=cdbpRealmMessageRouteEntry, cdbpPeerStatsState=cdbpPeerStatsState, cdbpPeerStatsSTRsIn=cdbpPeerStatsSTRsIn, cdbpPeerFirmwareRevision=cdbpPeerFirmwareRevision, cdbpLocalTcpListenPort=cdbpLocalTcpListenPort, cdbpPeerStatsCERsOut=cdbpPeerStatsCERsOut, cdbpLocalApplStorageType=cdbpLocalApplStorageType, cdbpPeerStatsAccRetrans=cdbpPeerStatsAccRetrans, cdbpPeerStatsPermanentFailures=cdbpPeerStatsPermanentFailures, cdbpLocalIpAddrIndex=cdbpLocalIpAddrIndex, cdbpRealmKnownPeersEntry=cdbpRealmKnownPeersEntry, cdbpPeerStatsDWAsIn=cdbpPeerStatsDWAsIn, cdbpLocalStatsTotalUpTime=cdbpLocalStatsTotalUpTime, cdbpPeerStatsDPAsOut=cdbpPeerStatsDPAsOut, ciscoDiaBaseProtPermanentFailureNotif=ciscoDiaBaseProtPermanentFailureNotif, ciscoDiameterBasePMIBLocalStatsSkippedGroup=ciscoDiameterBasePMIBLocalStatsSkippedGroup, cdbpPeerStatsRAAsIn=cdbpPeerStatsRAAsIn, cdbpPeerStatsStateDuration=cdbpPeerStatsStateDuration, cdbpPeerStatsProtocolErrors=cdbpPeerStatsProtocolErrors, ciscoDiameterBasePMIBNotificationsGroup=ciscoDiameterBasePMIBNotificationsGroup, cdbpRealmMessageRouteACRsOut=cdbpRealmMessageRouteACRsOut, cdbpLocalApplEntry=cdbpLocalApplEntry, cdbpPeerStatsDWAsOut=cdbpPeerStatsDWAsOut, cdbpPeerStatsAccReqstsDropped=cdbpPeerStatsAccReqstsDropped, cdbpRealmKnownPeersTable=cdbpRealmKnownPeersTable, cdbpPeerStatsAccsNotRecorded=cdbpPeerStatsAccsNotRecorded, 
cdbpLocalVendorRowStatus=cdbpLocalVendorRowStatus, cdbpLocalIpAddress=cdbpLocalIpAddress, cdbpLocalIpAddrEntry=cdbpLocalIpAddrEntry, cdbpRealmMessageRouteRARsIn=cdbpRealmMessageRouteRARsIn, cdbpRealmMessageRouteACAsIn=cdbpRealmMessageRouteACAsIn, cdbpLocalOriginHost=cdbpLocalOriginHost, cdbpRealmMessageRouteRAAsIn=cdbpRealmMessageRouteRAAsIn, cdbpRealmMessageRouteRAAsOut=cdbpRealmMessageRouteRAAsOut, ciscoDiameterBasePMIBPeerCfgSkippedGroup=ciscoDiameterBasePMIBPeerCfgSkippedGroup, cdbpPeerPortConnect=cdbpPeerPortConnect, cdbpPeerStatsWhoInitDisconnect=cdbpPeerStatsWhoInitDisconnect, cdbpPeerStatsCEAsOut=cdbpPeerStatsCEAsOut, cdbpAppAdvFromPeerIndex=cdbpAppAdvFromPeerIndex, cdbpRealmMessageRouteASRsIn=cdbpRealmMessageRouteASRsIn, cdbpPeerStatsLastDiscCause=cdbpPeerStatsLastDiscCause, cdbpPeerStatsASAsIn=cdbpPeerStatsASAsIn, cdbpPeerIpAddressType=cdbpPeerIpAddressType, cdbpPeerStatsRARsOut=cdbpPeerStatsRARsOut, cdbpPeerStatsDWCurrentStatus=cdbpPeerStatsDWCurrentStatus, cdbpRealmMessageRouteSTRsOut=cdbpRealmMessageRouteSTRsOut, cdbpLocalCfgs=cdbpLocalCfgs, cdbpRealmMessageRouteReqstsDrop=cdbpRealmMessageRouteReqstsDrop, cdbpLocalStatsTotalPacketsIn=cdbpLocalStatsTotalPacketsIn, cdbpPeerCfgs=cdbpPeerCfgs, cdbpRealmKnownPeers=cdbpRealmKnownPeers, cdbpPeerStatsMalformedReqsts=cdbpPeerStatsMalformedReqsts, cdbpRealmMessageRouteRARsOut=cdbpRealmMessageRouteRARsOut, cdbpRealmMessageRouteSTAsOut=cdbpRealmMessageRouteSTAsOut, cdbpLocalIpAddrTable=cdbpLocalIpAddrTable, cdbpPeerStatsACRsIn=cdbpPeerStatsACRsIn, ciscoDiameterBasePMIBRealmStatsSkippedGroup=ciscoDiameterBasePMIBRealmStatsSkippedGroup, cdbpRealmKnownPeersChosen=cdbpRealmKnownPeersChosen, cdbpLocalApplTable=cdbpLocalApplTable, cdbpRealmMessageRouteType=cdbpRealmMessageRouteType, cdbpPeerStatsASRsIn=cdbpPeerStatsASRsIn, cdbpPeerStatsTransportDown=cdbpPeerStatsTransportDown, cdbpRedundancyInfraState=cdbpRedundancyInfraState, ciscoDiameterBasePMIBPeerCfgGroup=ciscoDiameterBasePMIBPeerCfgGroup, 
cdbpRealmMessageRouteACAsOut=cdbpRealmMessageRouteACAsOut, cdbpAppAdvFromPeerEntry=cdbpAppAdvFromPeerEntry, ciscoDiaBaseProtEnableTransientFailureNotif=ciscoDiaBaseProtEnableTransientFailureNotif, cdbpLocalConfigReset=cdbpLocalConfigReset, cdbpPeerIpAddress=cdbpPeerIpAddress, cdbpAppAdvToPeerTable=cdbpAppAdvToPeerTable, cdbpPeerStatsTimeoutConnAtmpts=cdbpPeerStatsTimeoutConnAtmpts, cdbpPeerStatsDWRsIn=cdbpPeerStatsDWRsIn, cdbpRealmMessageRouteTable=cdbpRealmMessageRouteTable, cdbpPeerStatsRARsIn=cdbpPeerStatsRARsIn, cdbpPeerStatsACAsOut=cdbpPeerStatsACAsOut, cdbpRealmMessageRouteSTAsIn=cdbpRealmMessageRouteSTAsIn, cdbpPeerStatsASAsOut=cdbpPeerStatsASAsOut, cdbpPeerStatsDPRsOut=cdbpPeerStatsDPRsOut, cdbpPeerVendorTable=cdbpPeerVendorTable, ciscoDiaBaseProtPeerConnectionUpNotif=ciscoDiaBaseProtPeerConnectionUpNotif, cdbpPeerVendorStorageType=cdbpPeerVendorStorageType, cdbpPeerVendorIndex=cdbpPeerVendorIndex, cdbpPeerStatsCERsIn=cdbpPeerStatsCERsIn, cdbpRealmMessageRouteASAsOut=cdbpRealmMessageRouteASAsOut, ciscoDiameterBasePMIBLocalCfgSkippedGroup=ciscoDiameterBasePMIBLocalCfgSkippedGroup, cdbpPeerPortListen=cdbpPeerPortListen, cdbpAppAdvToPeerEntry=cdbpAppAdvToPeerEntry, ciscoDiaBaseProtProtocolErrorNotif=ciscoDiaBaseProtProtocolErrorNotif, ciscoDiameterBasePMIB=ciscoDiameterBasePMIB, cdbpLocalApplIndex=cdbpLocalApplIndex, cdbpAppAdvToPeerStorageType=cdbpAppAdvToPeerStorageType, cdbpLocalVendorStorageType=cdbpLocalVendorStorageType, cdbpPeerIpAddressIndex=cdbpPeerIpAddressIndex, cdbpPeerRowStatus=cdbpPeerRowStatus, cdbpLocalSctpListenPort=cdbpLocalSctpListenPort, cdbpPeerStatsCEAsIn=cdbpPeerStatsCEAsIn)
| 174.617021 | 9,504 | 0.799176 |
48ff11c606361c503d4ae242b33d2e5d2c9cf908 | 1,337 | py | Python | py_build/funcs.py | Aesonus/py-build | 790a750492b0f6ecd52f6f564d3aa4522e255406 | [
"MIT"
] | null | null | null | py_build/funcs.py | Aesonus/py-build | 790a750492b0f6ecd52f6f564d3aa4522e255406 | [
"MIT"
] | null | null | null | py_build/funcs.py | Aesonus/py-build | 790a750492b0f6ecd52f6f564d3aa4522e255406 | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing import Callable, Sequence, TYPE_CHECKING
import functools
if TYPE_CHECKING:
from .build import BuildStepCallable
def print_step_name(formatter=split_step_name, args: Sequence=()):
    """Gets a decorator that formats the name of the build step and prints it"""
    # NOTE(review): ``split_step_name`` (the default formatter) and the
    # returned ``format_step_name`` are not defined anywhere in this excerpt;
    # their definitions appear to have been stripped.  Verify against the
    # full py_build/funcs.py source before relying on this fragment.
    # Bind the extra format arguments for the (missing) inner decorator.
    fmt_args = args
    return format_step_name
def composed(*decorators: BuildStepCallable) -> BuildStepCallable:
    """
    Compose several decorators into a single decorator.  Useful for defining
    specific outputs and progress reports for a build step and reusing them.
    """
    # NOTE(review): ``decorated`` is not defined in this excerpt; the inner
    # composing function appears to have been stripped from this fragment.
    return decorated
| 33.425 | 80 | 0.682872 |
48ff6f626f5b448c258b452afb93725c786ec289 | 3,713 | py | Python | src/jellyroll/managers.py | jacobian-archive/jellyroll | 02751b3108b6f6ae732a801d42ca3c85cc759978 | [
"BSD-3-Clause"
] | 3 | 2015-03-02T06:34:45.000Z | 2016-11-24T18:53:59.000Z | src/jellyroll/managers.py | jacobian/jellyroll | 02751b3108b6f6ae732a801d42ca3c85cc759978 | [
"BSD-3-Clause"
] | null | null | null | src/jellyroll/managers.py | jacobian/jellyroll | 02751b3108b6f6ae732a801d42ca3c85cc759978 | [
"BSD-3-Clause"
] | null | null | null | import datetime
from django.db import models
from django.db.models import signals
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
from tagging.fields import TagField
| 35.361905 | 122 | 0.578777 |
5b011773dfebfb2a161d58f218cd80c611a2ea9c | 578 | py | Python | app_metrics.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
] | 33 | 2018-01-16T02:04:51.000Z | 2022-03-22T22:57:29.000Z | app_metrics.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
] | 7 | 2019-06-16T22:02:03.000Z | 2021-10-02T13:45:31.000Z | app_metrics.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
] | 14 | 2019-06-01T21:39:24.000Z | 2022-03-14T17:56:43.000Z | """
metrics application instance
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import os
from byceps.config import ConfigurationError
from byceps.metrics.application import create_app
ENV_VAR_NAME_DATABASE_URI = 'DATABASE_URI'
database_uri = os.environ.get(ENV_VAR_NAME_DATABASE_URI)
if not database_uri:
raise ConfigurationError(
f"No database URI was specified via the '{ENV_VAR_NAME_DATABASE_URI}' "
"environment variable.",
)
app = create_app(database_uri)
| 22.230769 | 79 | 0.730104 |
5b0196e4037e9465e0b4a7171647fde301968ecb | 1,927 | py | Python | mysql_tests/test_schema.py | maestro-1/gino | 3f06b4a9948a7657044686ae738ef3509b4729e1 | [
"BSD-3-Clause"
] | 1,376 | 2019-12-26T23:41:36.000Z | 2022-03-31T11:08:04.000Z | mysql_tests/test_schema.py | maestro-1/gino | 3f06b4a9948a7657044686ae738ef3509b4729e1 | [
"BSD-3-Clause"
] | 522 | 2017-07-22T00:49:06.000Z | 2019-12-25T17:02:22.000Z | mysql_tests/test_schema.py | maestro-1/gino | 3f06b4a9948a7657044686ae738ef3509b4729e1 | [
"BSD-3-Clause"
] | 89 | 2020-01-02T02:12:37.000Z | 2022-03-21T14:14:51.000Z | from enum import Enum
import pytest
import gino
from gino.dialects.aiomysql import AsyncEnum
pytestmark = pytest.mark.asyncio
db = gino.Gino()
blog_seq = db.Sequence("blog_seq", metadata=db, schema="schema_test")
| 30.109375 | 78 | 0.701609 |
5b0240511c5c9c995140e0add95f3c10735d13f4 | 903 | py | Python | solutions/29-distinct-powers.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
] | null | null | null | solutions/29-distinct-powers.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
] | null | null | null | solutions/29-distinct-powers.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
] | null | null | null | # problem 29
# Distinct powers
"""
Consider all integer combinations of ab for 2 a 5 and 2 b 5:
2**2=4, 2**3=8, 2**4=16, 2**5=32
3**2=9, 3**3=27, 3**4=81, 3**5=243
4**2=16, 4**3=64, 4**4=256, 4**5=1024
5**2=25, 5**3=125, 5**4=625, 5**5=3125
If they are then placed in numerical order, with any repeats removed,
we get the following sequence of 15 distinct terms:
4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
How many distinct terms are in the sequence generated by ab for
2 a 100 and 2 b 100?
"""
# analysis
"""
^ | 2 | 3 | 4 | 5 | N |
---+---+---+---+----+---+
2 | 4 | 8 | 16| 25 |2^N|
---+---+---+---+----+---+
3 | 9 | 27| 81| 243|3^N|
---+---+---+---+----+---+
4 | 16| 64|256|1024|4^N|
---+---+---+---+----+---+
5 | 25|125|625|3125|5^N|
---+---+---+---+----+---+
"""
# solution
s = set(a**b for a in range(2, 101) for b in range(2, 101))
print(len(s))
| 25.083333 | 70 | 0.499446 |
5b0340e8c87e83abc062cbdb7773314cbba482e5 | 2,633 | py | Python | flexget/plugins/input/input_csv.py | metaMMA/Flexget | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | [
"MIT"
] | null | null | null | flexget/plugins/input/input_csv.py | metaMMA/Flexget | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | [
"MIT"
] | 1 | 2017-10-09T23:06:44.000Z | 2017-10-09T23:06:44.000Z | flexget/plugins/input/input_csv.py | metaMMA/Flexget | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.utils import PY3
import logging
import csv
from requests import RequestException
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.cached_input import cached
log = logging.getLogger('csv')
| 29.255556 | 87 | 0.565515 |
5b03dd11f975d3847001932de43a5378848ce948 | 2,043 | py | Python | gdget.py | tienfuc/gdcmdtools | 357ada27cdb6ef0cc155b8fb52b6f6368cd1f277 | [
"BSD-2-Clause"
] | 29 | 2015-09-10T08:00:30.000Z | 2021-12-24T01:15:53.000Z | gdget.py | tienfuc/gdcmdtools | 357ada27cdb6ef0cc155b8fb52b6f6368cd1f277 | [
"BSD-2-Clause"
] | 56 | 2015-09-10T02:56:16.000Z | 2020-10-06T13:17:21.000Z | gdget.py | tienfuc/gdcmdtools | 357ada27cdb6ef0cc155b8fb52b6f6368cd1f277 | [
"BSD-2-Clause"
] | 4 | 2015-09-30T03:35:33.000Z | 2019-07-07T14:19:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import re
from gdcmdtools.base import BASE_INFO
from gdcmdtools.base import DEBUG_LEVEL
from gdcmdtools.get import GDGet
from gdcmdtools.get import export_format
import argparse
from argparse import RawTextHelpFormatter
from pprint import pprint
import logging
logger = logging.getLogger()
__THIS_APP = 'gdget'
__THIS_DESCRIPTION = 'Tool to download file from Google Drive'
__THIS_VERSION = BASE_INFO["version"]
if __name__ == '__main__':
    # Build the command line interface.  RawTextHelpFormatter preserves the
    # embedded newlines in the option help texts.
    arg_parser = argparse.ArgumentParser(
        description='%s v%s - %s - %s (%s)' %
        (__THIS_APP,
         __THIS_VERSION,
         __THIS_DESCRIPTION,
         BASE_INFO["app"],
         BASE_INFO["description"]),
        formatter_class=RawTextHelpFormatter)
    arg_parser.add_argument(
        'file_id',
        help='The file id or drive link for the file you\'re going to download')
    # For each ".*google-apps.<type>" key in export_format, render one
    # "<type>: <comma-separated formats>" help line.  NOTE: dict.iterkeys()
    # exists only on Python 2 -- this tool targets 2.x (see module docstring).
    help_export_format = "\n".join(
        [
            re.search(
                ".*google-apps\.(.*)",
                k).group(1) +
            ": " +
            ", ".join(
                export_format[k]) for k in export_format.iterkeys()])
    arg_parser.add_argument(
        '-f',
        '--export_format',
        metavar='FORMAT',
        default='raw',
        required=False,
        help='specify the export format for downloading,\ngoogle_format: export_format\n%s' %
        help_export_format)
    arg_parser.add_argument(
        '-s',
        '--save_as',
        metavar='NEW_FILE_NAME',
        help='save the downloaded file as ')
    arg_parser.add_argument('--debug',
                            choices=DEBUG_LEVEL,
                            default=DEBUG_LEVEL[-1],
                            help='define the debug level')
    args = arg_parser.parse_args()
    # set debug level
    logger.setLevel(getattr(logging, args.debug.upper()))
    logger.debug(args)
    # Perform the download.  NOTE(review): ``result`` is never inspected and
    # the exit status is always 0, even if the download fails silently.
    get = GDGet(args.file_id, args.export_format, args.save_as)
    result = get.run()
    sys.exit(0)
| 24.614458 | 93 | 0.603035 |
5b042f6383e41d397423d2d9b9c278a9f5788a29 | 325 | py | Python | Lotus/controller/common.py | Jayin/Lotus | 6a4791d81b29158a1a83aa6a5d607ab5d677dba4 | [
"Apache-2.0"
] | null | null | null | Lotus/controller/common.py | Jayin/Lotus | 6a4791d81b29158a1a83aa6a5d607ab5d677dba4 | [
"Apache-2.0"
] | null | null | null | Lotus/controller/common.py | Jayin/Lotus | 6a4791d81b29158a1a83aa6a5d607ab5d677dba4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from Lotus.app import app
from flask import render_template
| 16.25 | 38 | 0.713846 |
5b062f73819a8130b2460252ff5ee938a80ac7e2 | 8,261 | py | Python | src/retrocookie/git.py | cjolowicz/retrocookie | bc15dd46070ce50df21eeb016a385529d601f2b0 | [
"MIT"
] | 15 | 2020-06-21T14:35:42.000Z | 2022-03-30T15:48:55.000Z | src/retrocookie/git.py | cjolowicz/retrocookie | bc15dd46070ce50df21eeb016a385529d601f2b0 | [
"MIT"
] | 223 | 2020-05-22T14:35:05.000Z | 2022-03-28T00:19:23.000Z | src/retrocookie/git.py | cjolowicz/retrocookie | bc15dd46070ce50df21eeb016a385529d601f2b0 | [
"MIT"
] | 4 | 2020-11-19T12:55:01.000Z | 2022-03-15T14:24:25.000Z | """Git interface."""
from __future__ import annotations
import contextlib
import functools
import operator
import re
import subprocess # noqa: S404
from dataclasses import dataclass
from dataclasses import field
from pathlib import Path
from typing import Any
from typing import cast
from typing import Iterator
from typing import List
from typing import Optional
import pygit2
from retrocookie.utils import removeprefix
def git(
    *args: str, check: bool = True, **kwargs: Any
) -> subprocess.CompletedProcess[str]:
    """Run the ``git`` executable with the given arguments.

    Output is captured as text; by default a non-zero exit status raises
    ``subprocess.CalledProcessError``.
    """
    command = ["git", *args]
    return subprocess.run(  # noqa: S603,S607
        command, check=check, text=True, capture_output=True, **kwargs
    )
VERSION_PATTERN = re.compile(
r"""
(?P<major>\d+)\.
(?P<minor>\d+)
(\.(?P<patch>\d+))?
""",
re.VERBOSE,
)
def version() -> Version:
    """Return the version of the installed git executable."""
    output = git("version").stdout.strip()
    return Version.parse(removeprefix(output, "git version "))
def get_default_branch() -> str:
    """Return the default branch for new repositories.

    Consults the global git configuration first, then the system one, and
    falls back to "master" when neither defines ``init.defaultBranch``.
    """
    for load_config in (
        pygit2.Config.get_global_config,
        pygit2.Config.get_system_config,
    ):
        # A missing config file raises IOError; a missing key raises KeyError.
        with contextlib.suppress(IOError, KeyError):
            branch = load_config()["init.defaultBranch"]
            assert isinstance(branch, str)  # noqa: S101
            return branch
    return "master"
def get_current_branch(self) -> str:
"""Return the current branch."""
return self.repo.head.shorthand # type: ignore[no-any-return]
def exists_branch(self, branch: str) -> bool:
"""Return True if the branch exists."""
return branch in self.repo.branches
def switch_branch(self, branch: str) -> None:
"""Switch the current branch."""
self.repo.checkout(self.repo.branches[branch])
    def update_remote(self) -> None:
        """Update the remotes."""
        # Shells out to ``git remote update`` in this repository.
        self.git("remote", "update")
def fetch_commits(self, source: Repository, *commits: str) -> None:
"""Fetch the given commits and their immediate parents."""
path = source.path.resolve()
self.git("fetch", "--no-tags", "--depth=2", str(path), *commits)
def push(self, remote: str, *refs: str, force: bool = False) -> None:
"""Update remote refs."""
options = ["--force-with-lease"] if force else []
self.git("push", *options, remote, *refs)
def parse_revisions(self, *revisions: str) -> List[str]:
"""Parse revisions using the format specified in gitrevisions(7)."""
process = self.git("rev-list", "--no-walk", *revisions)
result = process.stdout.split()
result.reverse()
return result
def lookup_replacement(self, commit: str) -> str:
"""Lookup the replace ref for the given commit."""
refname = f"refs/replace/{commit}"
ref = self.repo.lookup_reference(refname)
return cast(str, ref.target.hex)
def _ensure_relative(self, path: Path) -> Path:
"""Interpret the path relative to the repository root."""
return path.relative_to(self.path) if path.is_absolute() else path
    def read_text(self, path: Path, *, ref: str = "HEAD") -> str:
        """Return the contents of the blob at the given path."""
        commit = self.repo.revparse_single(ref)
        path = self._ensure_relative(path)
        # Walk the commit's tree one path component at a time: pygit2 trees
        # support ``tree / name`` lookup, so reducing with the division
        # operator over path.parts yields the blob at the full path.
        blob = functools.reduce(operator.truediv, path.parts, commit.tree)
        return cast(str, blob.data.decode())
def exists(self, path: Path, *, ref: str = "HEAD") -> bool:
"""Return True if a blob exists at the given path."""
commit = self.repo.revparse_single(ref)
path = self._ensure_relative(path)
try:
functools.reduce(operator.truediv, path.parts, commit.tree)
return True
except KeyError:
return False
def add(self, *paths: Path) -> None:
"""Add paths to the index."""
for path in paths:
path = self._ensure_relative(path)
self.repo.index.add(path)
else:
self.repo.index.add_all()
self.repo.index.write()
    def commit(self, message: str) -> None:
        """Create a commit with the given message from the current index."""
        try:
            head = self.repo.head
            refname = head.name
            parents = [head.target]
        except pygit2.GitError:
            # Presumably an unborn HEAD (fresh repository): target the
            # default branch and create a parentless root commit.
            branch = get_default_branch()
            refname = f"refs/heads/{branch}"
            parents = []
        tree = self.repo.index.write_tree()
        author = committer = self.repo.default_signature
        self.repo.create_commit(refname, author, committer, message, tree, parents)
    def cherrypick(self, *refs: str) -> None:
        """Cherry-pick the given commits."""
        # Delegates to ``git cherry-pick`` with all refs in one invocation.
        self.git("cherry-pick", *refs)
    def add_worktree(
        self,
        branch: str,
        path: Path,
        *,
        base: str = "HEAD",
        force: bool = False,
    ) -> Repository:
        """Add a worktree.

        Creates branch ``branch`` at ``base`` (``-B`` resets an existing
        branch when ``force`` is set, ``-b`` otherwise) and checks it out
        into a linked worktree at ``path``.  ``--no-track`` avoids setting
        an upstream.  Returns a Repository for the new worktree.
        """
        self.git(
            "worktree",
            "add",
            str(path),
            "--no-track",
            "-B" if force else "-b",
            branch,
            base,
        )
        return Repository(path)
def remove_worktree(self, path: Path, *, force: bool = False) -> None:
"""Remove a worktree."""
if force:
self.git("worktree", "remove", "--force", str(path))
else:
self.git("worktree", "remove", str(path))
| 30.824627 | 86 | 0.587459 |
5b084682efe35e9ca46aead0d385f2c28ccda23b | 5,630 | py | Python | apps/user/views.py | awsbreathpanda/dailyfresh | c218cdc3ea261b695ff00b6781ba3040f5d06eff | [
"MIT"
] | null | null | null | apps/user/views.py | awsbreathpanda/dailyfresh | c218cdc3ea261b695ff00b6781ba3040f5d06eff | [
"MIT"
] | 7 | 2021-03-30T14:18:30.000Z | 2022-01-13T03:13:37.000Z | apps/user/views.py | awsbreathpanda/dailyfresh | c218cdc3ea261b695ff00b6781ba3040f5d06eff | [
"MIT"
] | null | null | null | from django.shortcuts import redirect
from django.contrib.auth import authenticate, login, logout
from celery_tasks.tasks import celery_send_mail
from apps.user.models import User
import re
from django.shortcuts import render
from django.views import View
from utils.security import get_user_token, get_activation_link, get_user_id
from django.conf import settings
from django.http import HttpResponse
from django.urls import reverse
# Create your views here.
# /user/register
# /user/activate/(token)
# /user/login
# /user/
# /user/order/(page)
# /user/address
# /user/logout
| 31.80791 | 83 | 0.59325 |
5b08fda32750d87556f3ccf00e2fba375865e05c | 2,666 | py | Python | heatzy/pilote_v1.py | Devotics/heatzy-home-hassistant | 34ef71604d10b1d45be4cfb17d811bdd33042ce7 | [
"MIT"
] | 22 | 2019-03-07T22:51:12.000Z | 2021-03-06T12:14:50.000Z | heatzy/pilote_v1.py | Devotics/heatzy-home-hassistant | 34ef71604d10b1d45be4cfb17d811bdd33042ce7 | [
"MIT"
] | 15 | 2019-03-07T13:04:11.000Z | 2021-03-11T21:34:34.000Z | heatzy/pilote_v1.py | Devotics/heatzy-home-hassistant | 34ef71604d10b1d45be4cfb17d811bdd33042ce7 | [
"MIT"
] | 7 | 2019-11-17T11:01:50.000Z | 2021-02-24T18:13:28.000Z | from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (HVAC_MODE_AUTO,
PRESET_AWAY,
PRESET_COMFORT, PRESET_ECO,
PRESET_NONE,
SUPPORT_PRESET_MODE)
from homeassistant.const import TEMP_CELSIUS
HEATZY_TO_HA_STATE = {
'\u8212\u9002': PRESET_COMFORT,
'\u7ecf\u6d4e': PRESET_ECO,
'\u89e3\u51bb': PRESET_AWAY,
'\u505c\u6b62': PRESET_NONE,
}
HA_TO_HEATZY_STATE = {
PRESET_COMFORT: [1, 1, 0],
PRESET_ECO: [1, 1, 1],
PRESET_AWAY: [1, 1, 2],
PRESET_NONE: [1, 1, 3],
}
| 27.484536 | 79 | 0.577269 |
5b0af9dfbe74e34130cf9a393f33916249893c28 | 8,315 | py | Python | kubernetes-the-hard-way/system/collections/ansible_collections/community/general/plugins/modules/cloud/misc/proxmox_template.py | jkroepke/homelab | ffdd849e39b52972870f5552e734fd74cb1254a1 | [
"Apache-2.0"
] | 5 | 2020-12-16T21:42:09.000Z | 2022-03-28T16:04:32.000Z | kubernetes-the-hard-way/system/collections/ansible_collections/community/general/plugins/modules/cloud/misc/proxmox_template.py | jkroepke/kubernetes-the-hard-way | 70fd096a04addec0777744c9731a4e3fbdc40c8f | [
"Apache-2.0"
] | null | null | null | kubernetes-the-hard-way/system/collections/ansible_collections/community/general/plugins/modules/cloud/misc/proxmox_template.py | jkroepke/kubernetes-the-hard-way | 70fd096a04addec0777744c9731a4e3fbdc40c8f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright: Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: proxmox_template
short_description: management of OS templates in Proxmox VE cluster
description:
- allows you to upload/delete templates in Proxmox VE cluster
options:
api_host:
description:
- the host of the Proxmox VE cluster
type: str
required: true
api_user:
description:
- the user to authenticate with
type: str
required: true
api_password:
description:
- the password to authenticate with
- you can use PROXMOX_PASSWORD environment variable
type: str
validate_certs:
description:
- enable / disable https certificate verification
default: 'no'
type: bool
node:
description:
- Proxmox VE node, when you will operate with template
type: str
required: true
src:
description:
- path to uploaded file
- required only for C(state=present)
type: path
template:
description:
- the template name
- required only for states C(absent), C(info)
type: str
content_type:
description:
- content type
- required only for C(state=present)
type: str
default: 'vztmpl'
choices: ['vztmpl', 'iso']
storage:
description:
- target storage
type: str
default: 'local'
timeout:
description:
- timeout for operations
type: int
default: 30
force:
description:
- can be used only with C(state=present), exists template will be overwritten
type: bool
default: 'no'
state:
description:
- Indicate desired state of the template
type: str
choices: ['present', 'absent']
default: present
notes:
- Requires proxmoxer and requests modules on host. This modules can be installed with pip.
requirements: [ "proxmoxer", "requests" ]
author: Sergei Antipov (@UnderGreen)
'''
EXAMPLES = '''
- name: Upload new openvz template with minimal options
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
src: ~/ubuntu-14.04-x86_64.tar.gz
- name: >
Upload new openvz template with minimal options use environment
PROXMOX_PASSWORD variable(you should export it before)
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_host: node1
src: ~/ubuntu-14.04-x86_64.tar.gz
- name: Upload new openvz template with all options and force overwrite
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
storage: local
content_type: vztmpl
src: ~/ubuntu-14.04-x86_64.tar.gz
force: yes
- name: Delete template with minimal options
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
template: ubuntu-14.04-x86_64.tar.gz
state: absent
'''
import os
import time
try:
from proxmoxer import ProxmoxAPI
HAS_PROXMOXER = True
except ImportError:
HAS_PROXMOXER = False
from ansible.module_utils.basic import AnsibleModule
# Script entry point.  NOTE(review): ``main`` is not defined anywhere in
# this excerpt; its definition appears to have been stripped.
if __name__ == '__main__':
    main()
| 33.26 | 138 | 0.657486 |
5b0b336675387a3e79e4c5c116c3b8865c4ef0c0 | 9,024 | py | Python | polling_stations/apps/councils/management/commands/import_councils.py | DemocracyClub/UK-Polling-Stations | d5c428fc7fbccf0c13a84fa0045dfd332b2879e7 | [
"BSD-3-Clause"
] | 29 | 2015-03-10T08:41:34.000Z | 2022-01-12T08:51:38.000Z | polling_stations/apps/councils/management/commands/import_councils.py | DemocracyClub/UK-Polling-Stations | d5c428fc7fbccf0c13a84fa0045dfd332b2879e7 | [
"BSD-3-Clause"
] | 4,112 | 2015-04-01T21:27:38.000Z | 2022-03-31T19:22:11.000Z | polling_stations/apps/councils/management/commands/import_councils.py | DemocracyClub/UK-Polling-Stations | d5c428fc7fbccf0c13a84fa0045dfd332b2879e7 | [
"BSD-3-Clause"
] | 31 | 2015-03-18T14:52:50.000Z | 2022-02-24T10:31:07.000Z | import json
from html import unescape
import requests
from django.apps import apps
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon
from django.conf import settings
from django.core.management.base import BaseCommand
from requests.exceptions import HTTPError
from retry import retry
from councils.models import Council, CouncilGeography
from polling_stations.settings.constants.councils import WELSH_COUNCIL_NAMES
# NOTE(review): the name suggests these are Northern Ireland council
# identifiers that receive special handling; the code consuming this list
# is outside this fragment -- confirm against the rest of import_councils.py.
NIR_IDS = [
    "ABC",
    "AND",
    "ANN",
    "BFS",
    "CCG",
    "DRS",
    "FMO",
    "LBC",
    "MEA",
    "MUL",
    "NMD",
]
| 37.6 | 275 | 0.614251 |
5b0b4a59e216a0cba015910bd19bb58090619801 | 3,693 | py | Python | saleor/webhook/observability/payload_schema.py | DevPoke/saleor | ced3a2249a18031f9f593e71d1d18aa787ec1060 | [
"CC-BY-4.0"
] | null | null | null | saleor/webhook/observability/payload_schema.py | DevPoke/saleor | ced3a2249a18031f9f593e71d1d18aa787ec1060 | [
"CC-BY-4.0"
] | null | null | null | saleor/webhook/observability/payload_schema.py | DevPoke/saleor | ced3a2249a18031f9f593e71d1d18aa787ec1060 | [
"CC-BY-4.0"
] | null | null | null | from datetime import datetime
from enum import Enum
from json.encoder import ESCAPE_ASCII, ESCAPE_DCT # type: ignore
from typing import List, Optional, Tuple, TypedDict
HttpHeaders = List[Tuple[str, str]]
| 24.296053 | 79 | 0.642296 |
5b0e8250fd1078639a824b073c3ab62b92fe28cf | 4,537 | py | Python | NMTK_apps/NMTK_server/wms/djpaste.py | bhargavasana/nmtk | 9bebfcc4b43c28a1f2b2574060ea3195fca2c7dd | [
"Unlicense"
] | null | null | null | NMTK_apps/NMTK_server/wms/djpaste.py | bhargavasana/nmtk | 9bebfcc4b43c28a1f2b2574060ea3195fca2c7dd | [
"Unlicense"
] | null | null | null | NMTK_apps/NMTK_server/wms/djpaste.py | bhargavasana/nmtk | 9bebfcc4b43c28a1f2b2574060ea3195fca2c7dd | [
"Unlicense"
] | null | null | null | # (c) 2013 Chander Ganesan and contributors; written to work with Django and Paste (http://pythonpaste.org)
# Paste CGI "middleware" for Django by Chander Ganesan <chander@otg-nc.com>
# Open Technology Group, Inc <http://www.otg-nc.com>
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
import os
import sys
import subprocess
import urllib
try:
import select
except ImportError:
select = None
from paste.util import converters
from paste.cgiapp import *
from paste.cgiapp import StdinReader, proc_communicate
from paste.cgiapp import CGIApplication as PasteCGIApplication
import urllib
from django.http import HttpResponse
# Taken from http://plumberjack.blogspot.com/2009/09/how-to-treat-logger-like-output-stream.html
import logging
mod_logger=logging.getLogger(__name__)
| 36.007937 | 107 | 0.561384 |
5b0f67ce020d1273d176ad58ddcab8801ec9c7f2 | 181 | py | Python | Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py | Arbupa/DAS_Sistemas | 52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1 | [
"MIT"
] | 41 | 2017-09-26T09:36:32.000Z | 2022-03-19T18:05:25.000Z | Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py | Arbupa/DAS_Sistemas | 52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1 | [
"MIT"
] | 67 | 2017-09-11T05:06:12.000Z | 2022-02-14T04:44:04.000Z | Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py | Arbupa/DAS_Sistemas | 52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1 | [
"MIT"
] | 210 | 2017-09-01T00:10:08.000Z | 2022-03-19T18:05:12.000Z | cantidad= input("Cuantas personas van a cenar?")
cant = int(cantidad)
print(cant)
if cant > 8:
print("Lo siento, tendran que esperar")
else:
print("La mesa esta lista")
| 15.083333 | 48 | 0.674033 |
5b0faab2d16278cb33dcd52c6711c4e057f78b52 | 7,424 | py | Python | build/piman.app/pysnmp/carrier/asyncore/dgram/base.py | jackgisel/team-athens | 91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0 | [
"Apache-2.0"
] | null | null | null | build/piman.app/pysnmp/carrier/asyncore/dgram/base.py | jackgisel/team-athens | 91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0 | [
"Apache-2.0"
] | null | null | null | build/piman.app/pysnmp/carrier/asyncore/dgram/base.py | jackgisel/team-athens | 91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0 | [
"Apache-2.0"
] | null | null | null | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
import socket
import errno
import sys
from pysnmp.carrier.asyncore.base import AbstractSocketTransport
from pysnmp.carrier import sockfix, sockmsg, error
from pysnmp import debug
# Ignore these socket errors
# Maps errno values to a boolean consumed by the datagram transport's error
# handling. The two values clearly select different treatment of the ignored
# error -- NOTE(review): the split looks like "connection-terminating" (True)
# vs. "transient, just drop the datagram" (False), but the handle_read/
# handle_write code that consumes this dict is not visible in this excerpt;
# confirm before relying on that reading.
sockErrors = {errno.ESHUTDOWN: True,
              errno.ENOTCONN: True,
              errno.ECONNRESET: False,
              errno.ECONNREFUSED: False,
              errno.EAGAIN: False,
              errno.EWOULDBLOCK: False}
if hasattr(errno, 'EBADFD'):
    # bad FD may happen upon FD closure on n-1 select() event
    # (EBADFD is not defined on every platform, hence the hasattr guard)
    sockErrors[errno.EBADFD] = True
| 40.791209 | 237 | 0.630119 |
5b10e569de8510acb457502268786c36584d12b7 | 5,539 | py | Python | src/coreclr/scripts/superpmi-replay.py | JimmyCushnie/runtime | b7eb82871f1d742efb444873e11dd6241cea73d2 | [
"MIT"
] | 2 | 2021-05-04T11:27:27.000Z | 2021-06-18T14:04:08.000Z | src/coreclr/scripts/superpmi-replay.py | JimmyCushnie/runtime | b7eb82871f1d742efb444873e11dd6241cea73d2 | [
"MIT"
] | 18 | 2019-12-03T00:21:59.000Z | 2022-01-30T04:45:58.000Z | src/coreclr/scripts/superpmi-replay.py | JimmyCushnie/runtime | b7eb82871f1d742efb444873e11dd6241cea73d2 | [
"MIT"
] | 2 | 2022-01-23T12:24:04.000Z | 2022-02-07T15:44:03.000Z | #!/usr/bin/env python3
#
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
#
##
# Title               : superpmi-replay.py
#
# Notes:
#
# Script to run "superpmi replay" for various collections under various COMPlus_JitStressRegs value.
################################################################################
################################################################################
import argparse
from os import path
import os
from os import listdir
from coreclr_arguments import *
from superpmi_setup import run_command
# Command-line interface: four single-dash string options, all consumed by
# setup_args() below. Declared data-driven so option and help text line up.
parser = argparse.ArgumentParser(description="description")
for _flag, _help_text in (
        ("-arch", "Architecture"),
        ("-platform", "OS platform"),
        ("-jit_directory", "path to the directory containing clrjit binaries"),
        ("-log_directory", "path to the directory containing superpmi log files"),
):
    parser.add_argument(_flag, help=_help_text)
# COMPlus_JitStressRegs values to replay under; each entry becomes one
# "superpmi.py replay" invocation with its own log file.
jit_flags = [
    "JitStressRegs=" + stress_value
    for stress_value in ("0", "1", "2", "3", "4", "8", "0x10", "0x80", "0x1000")
]
def setup_args(args):
    """ Setup the args for SuperPMI to use.

    Args:
        args (ArgParse): args parsed by arg parser

    Returns:
        args (CoreclrArguments)
    """
    coreclr_args = CoreclrArguments(args, require_built_core_root=False, require_built_product_dir=False,
                                    require_built_test_dir=False, default_build_type="Checked")

    # One verify() call per CLI option, in declaration order.
    # Each spec is (attribute name, validation predicate, failure message).
    verifications = (
        ("arch", lambda unused: True, "Unable to set arch"),
        ("platform", lambda unused: True, "Unable to set platform"),
        ("jit_directory", lambda jit_directory: os.path.isdir(jit_directory), "jit_directory doesn't exist"),
        ("log_directory", lambda log_directory: True, "log_directory doesn't exist"),
    )
    for attr_name, predicate, failure_message in verifications:
        coreclr_args.verify(args, attr_name, predicate, failure_message)

    return coreclr_args
def _consolidate_logs(log_directory, final_log_name, failed_runs):
    """Append every superpmi_Jit*.log in log_directory into final_log_name.

    Each per-flag log is preceded by a banner naming its source file. If any
    replay failed, a failure summary is appended at the end.

    Args:
        log_directory (str): directory containing the per-flag superpmi logs
        final_log_name (str): path of the consolidated log (opened in append mode)
        failed_runs (list): one "Failure in <log>" entry per failed replay
    """
    print("Consolidating final {}".format(final_log_name))
    with open(final_log_name, "a") as final_superpmi_log:
        for superpmi_log in listdir(log_directory):
            # Only pick up the per-flag logs this script produced
            # (superpmi_JitStressRegs_*.log); skip everything else.
            if not superpmi_log.startswith("superpmi_Jit") or not superpmi_log.endswith(".log"):
                continue

            print("Appending {}".format(superpmi_log))
            final_superpmi_log.write("======================================================={}".format(os.linesep))
            final_superpmi_log.write("Contents from {}{}".format(superpmi_log, os.linesep))
            final_superpmi_log.write("======================================================={}".format(os.linesep))
            with open(path.join(log_directory, superpmi_log), "r") as current_superpmi_log:
                final_superpmi_log.write(current_superpmi_log.read())

        # Log failures summary
        if len(failed_runs) > 0:
            final_superpmi_log.write(os.linesep)
            final_superpmi_log.write(os.linesep)
            # Bug fix: the original string had no "{}" placeholder, so its
            # .format(os.linesep) call was a silent no-op and the intended
            # line break after the header was never written.
            final_superpmi_log.write("========Failed runs summary========{}".format(os.linesep))
            final_superpmi_log.write(os.linesep.join(failed_runs))


def main(main_args):
    """Main entrypoint

    Runs "superpmi.py download" once, then "superpmi.py replay" once per
    COMPlus_JitStressRegs value in jit_flags, and finally consolidates the
    per-flag logs into a single superpmi_<platform>_<arch>.log.

    Args:
        main_args ([type]): Arguments to the script

    Returns:
        int: 0 if every replay succeeded, 1 otherwise
    """
    python_path = sys.executable
    cwd = os.path.dirname(os.path.realpath(__file__))
    coreclr_args = setup_args(main_args)

    spmi_location = path.join(cwd, "artifacts", "spmi")
    log_directory = coreclr_args.log_directory
    platform_name = coreclr_args.platform
    os_name = "win" if platform_name.lower() == "windows" else "unix"
    arch_name = coreclr_args.arch
    # 64-bit targets are replayed with an x64-hosted JIT, 32-bit with x86.
    host_arch_name = "x64" if arch_name.endswith("64") else "x86"
    jit_path = path.join(coreclr_args.jit_directory, 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))

    print("Running superpmi.py download")
    run_command([python_path, path.join(cwd, "superpmi.py"), "download", "--no_progress", "-target_os", platform_name,
                 "-target_arch", arch_name, "-core_root", cwd, "-spmi_location", spmi_location], _exit_on_fail=True)

    failed_runs = []
    for jit_flag in jit_flags:
        log_file = path.join(log_directory, 'superpmi_{}.log'.format(jit_flag.replace("=", "_")))
        print("Running superpmi.py replay for {}".format(jit_flag))

        _, _, return_code = run_command([
            python_path, path.join(cwd, "superpmi.py"), "replay", "-core_root", cwd,
            "-jitoption", jit_flag, "-jitoption", "TieredCompilation=0",
            "-target_os", platform_name, "-target_arch", arch_name,
            "-arch", host_arch_name,
            "-jit_path", jit_path, "-spmi_location", spmi_location,
            "-log_level", "debug", "-log_file", log_file])

        if return_code != 0:
            failed_runs.append("Failure in {}".format(log_file))

    # Consolidate all superpmi_*.logs in superpmi_platform_architecture.log
    final_log_name = path.join(log_directory, "superpmi_{}_{}.log".format(platform_name, arch_name))
    _consolidate_logs(log_directory, final_log_name, failed_runs)

    return 0 if len(failed_runs) == 0 else 1
if __name__ == "__main__":
    # Parse the CLI options and propagate main()'s result as the exit code.
    sys.exit(main(parser.parse_args()))
| 37.938356 | 118 | 0.609677 |
5b10fde1a0b02a1e7f85ed42e2bfe8b97109fa80 | 514 | py | Python | parse_cookie.py | olnikiforov/hillel | 911bb94169aa277932e346e564e5efd69073d634 | [
"MIT"
] | null | null | null | parse_cookie.py | olnikiforov/hillel | 911bb94169aa277932e346e564e5efd69073d634 | [
"MIT"
] | 1 | 2021-04-01T18:56:38.000Z | 2021-04-01T18:56:38.000Z | parse_cookie.py | olnikiforov/hillel | 911bb94169aa277932e346e564e5efd69073d634 | [
"MIT"
] | null | null | null |
if __name__ == '__main__':
    # Smoke tests for parse_cookie (defined earlier in this file; its body is
    # not visible in this excerpt). The contract these asserts pin down:
    # pairs are separated by ';' (trailing ';' allowed), each pair splits on
    # the FIRST '=' only -- so values may themselves contain '=' -- and the
    # empty string yields an empty dict.
    assert parse_cookie('name=Dima;') == {'name': 'Dima'}
    assert parse_cookie('') == {}
    assert parse_cookie('name=Dima;age=28;') == {'name': 'Dima', 'age': '28'}
    assert parse_cookie('name=Dima=User;age=28;') == {'name': 'Dima=User', 'age': '28'}
| 30.235294 | 87 | 0.509728 |
5b110f22e3b74f1f108abb0d9e76465e1a151a75 | 2,234 | py | Python | neuralgym/callbacks/model_saver.py | pancookie/SNPGAN_TECcompletion | 2245179db9d9c64da20a6dd7098795a1cf724ad3 | [
"MIT"
] | 1 | 2022-02-06T07:38:43.000Z | 2022-02-06T07:38:43.000Z | neuralgym/callbacks/model_saver.py | pancookie/SNPGAN_TECcompletion | 2245179db9d9c64da20a6dd7098795a1cf724ad3 | [
"MIT"
] | null | null | null | neuralgym/callbacks/model_saver.py | pancookie/SNPGAN_TECcompletion | 2245179db9d9c64da20a6dd7098795a1cf724ad3 | [
"MIT"
] | null | null | null | """model_saver"""
import os
from . import PeriodicCallback, CallbackLoc
from ..utils.logger import callback_log
| 37.864407 | 115 | 0.606088 |
5b1186da0e35b3ea68ef672cbd4ad76ad6086353 | 1,352 | py | Python | rower_monitor/boat_metrics.py | sergiomo/diy-rower-monitor | 32730025874f32015b8a582175db36cdd351ce1e | [
"Unlicense"
] | null | null | null | rower_monitor/boat_metrics.py | sergiomo/diy-rower-monitor | 32730025874f32015b8a582175db36cdd351ce1e | [
"Unlicense"
] | null | null | null | rower_monitor/boat_metrics.py | sergiomo/diy-rower-monitor | 32730025874f32015b8a582175db36cdd351ce1e | [
"Unlicense"
] | null | null | null | from .time_series import TimeSeries
| 35.578947 | 118 | 0.647929 |
5b11b42643e2e5c40307befa37ef00c0f90f66bd | 121 | py | Python | trackMe-backend/src/config.py | matth3wliuu/trackMe | 0fb22bb8adf147fb4d4ed09c5c7253d0e54bf992 | [
"MIT"
] | 1 | 2022-01-28T06:20:03.000Z | 2022-01-28T06:20:03.000Z | trackMe-backend/src/config.py | matth3wliuu/trackMe | 0fb22bb8adf147fb4d4ed09c5c7253d0e54bf992 | [
"MIT"
] | null | null | null | trackMe-backend/src/config.py | matth3wliuu/trackMe | 0fb22bb8adf147fb4d4ed09c5c7253d0e54bf992 | [
"MIT"
] | null | null | null | dbConfig = {
"user": "root",
"password": "123567l098",
"host": "localhost",
"database": "trackMe_dev"
} | 20.166667 | 29 | 0.545455 |
5b1363485151128caf183c9f6b705444acca65c5 | 136 | py | Python | src/localsrv/urls.py | vladiibine/localsrv | 7bb8fd2e08f43a1b5adef9ad17ab534a317e0a57 | [
"MIT"
] | null | null | null | src/localsrv/urls.py | vladiibine/localsrv | 7bb8fd2e08f43a1b5adef9ad17ab534a317e0a57 | [
"MIT"
] | 4 | 2015-04-28T08:20:26.000Z | 2015-06-13T06:32:31.000Z | src/localsrv/urls.py | vladiibine/localsrv | 7bb8fd2e08f43a1b5adef9ad17ab534a317e0a57 | [
"MIT"
] | 1 | 2018-03-04T20:29:27.000Z | 2018-03-04T20:29:27.000Z | from django.conf.urls import url
from .views import serve_all
urlpatterns = (
url(r'^.*$', serve_all, name="localsrv:serve_all"),
) | 22.666667 | 55 | 0.705882 |