blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9a7ffb2104315b950466ac7b9091ddd110c3f54d | fec00fda9b8b0f41db343edd01e732ce498438db | /tests/test_deepl_grpc.py | e91874759686971e32d067c6c0c976eb54d8c1df | [
"MIT"
] | permissive | ffreemt/deepl-grpc | 005f04391f41bb8c6112e8b30d249c75cca851fe | 3af6b48c5fcf321251514579e21d95ed57f076b8 | refs/heads/master | 2023-03-25T11:41:47.934664 | 2021-03-14T17:31:58 | 2021-03-14T17:31:58 | 347,364,881 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | from deepl_grpc import __version__
def test_version():
assert __version__[:3] == "0.1"
| [
"yucongo+fmt@gmail.com"
] | yucongo+fmt@gmail.com |
6bf26fdbf45870a7cf98a9ee1cce2f828ceb9159 | 2f74c4d2e5c6dd51eb3eaf0ee4b97122b26e7066 | /unit_03/07_Regular_Expressions/1-Regular_Expressions/2_basics.py | e25bd6bf9d5e8b8fb10b05377993149e0888175b | [
"MIT"
] | permissive | duliodenis/python_master_degree | c6a4ccf5d98c48cfc1efd29dfc116bf55b6b4f01 | 3ab76838ce2fc1606f28e988a3273dd27122a621 | refs/heads/master | 2020-04-14T09:03:51.863305 | 2019-07-22T23:05:19 | 2019-07-22T23:05:19 | 163,751,089 | 21 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | #
# Regular Expressions in Python: Reading Files
# Python Techdegree
#
# Created by Dulio Denis on 12/27/18.
# Copyright (c) 2018 ddApps. All rights reserved.
# ------------------------------------------------
# Challenge 1: Basics
# ------------------------------------------------
# Challenge Task 1 of 5
# Use open() to load the file "basics.txt" into
# the variable file_object.
# ------------------------------------------------
file_object = open("basics.txt")
# ------------------------------------------------
# Challenge Task 2 of 5
# Read the contents of file_object into a new
# variable named data.
data = file_object.read()
# ------------------------------------------------
# Challenge Task 3 of 5
# Now close the file_object file so it isn't taking
# up memory.
file_object.close()
# ------------------------------------------------
# Challenge Task 4 of 5
# Import re. Create an re.match() for the word "Four"
# in the data variable. Assign this to a new variable
# named first.
import re
first = re.match(r"Four", data)
# ------------------------------------------------
# Challenge Task 5 of 5
# Finally, make a new variable named liberty that is
# an re.search() for the word "Liberty" in our data
# variable.
liberty = re.search(r'Liberty', data) | [
"dulio.denis@yahoo.com"
] | dulio.denis@yahoo.com |
6494b5c825410f2141c94a906fd410931326f256 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_195/ch15_2019_03_01_13_34_25_387920.py | d755c93066e2c1f69e21bd460158126178964777 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | import math.pi
def volume_da_pizza(z,a):
y= math.pi*z**2*a
return y | [
"you@example.com"
] | you@example.com |
ad5ababfd67e9e7708f5e970d4fbad8be6e9e2db | 715a11d7b8f15694a5cc4b47ac0e3a3cfc4ffedc | /bi46/5669.py | b92cbc7001d2c8b620449542ddad67fce5979e74 | [] | no_license | mohanrajanr/CodePrep | 5cd538d16598f6a0d2486357d3cc6e0fa1626e4e | 2e23a5f996139b887bf723f58b23368cf8121cd4 | refs/heads/main | 2023-04-23T04:10:06.111120 | 2021-05-11T06:47:51 | 2021-05-11T06:47:51 | 366,283,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py | from typing import List
def canChoose(groups: List[List[int]], nums: List[int]) -> bool:
index = len(nums) - 1
hasMatched = False
while groups and nums:
tu = tuple(groups.pop())
# print("Checking :{}".format(tu))
hasMatched = False
while index - len(tu) + 1 >= 0:
values = nums[index - len(tu) + 1: index+1]
# print(values)
if tuple(values) == tu:
hasMatched = True
# print("MM")
t = len(tu)
while t:
index -= 1
nums.pop()
t -= 1
break
index -= 1
nums.pop()
# print(groups)
# print(nums)
return hasMatched and len(groups) == 0
print(canChoose([[1,-1,-1],[3,-2,0]], [1,-1,0,1,-1,-1,3,-2,0]))
print(canChoose([[10,-2],[1,2,3,4]], [1,2,3,4,10,-2]))
print(canChoose([[1,2,3],[3,4]], [1,-1,0,1,-1,-1,3,-2,0]))
print(canChoose([[1,2,3],[3,4]], [7,7,1,2,3,4,7,7])) | [
"mohanrajan1996@gmail.com"
] | mohanrajan1996@gmail.com |
1f03ac5fe75748482615a376e90900b939689d79 | f4996d975ce705c1d09fdad168cd1225bbb99dd8 | /chemex/experiments/shift/exp_help.py | c0d4c5bf67725b6e40542d32471968532318bbc0 | [
"BSD-3-Clause"
] | permissive | chriswaudby/chemex | d0d39d834989d92f738f17de737167b7f9118884 | c3424bb8891b05de43c969108311d3ea50ba3428 | refs/heads/develop | 2020-01-23T21:05:04.695331 | 2016-11-08T13:29:16 | 2016-11-08T13:29:16 | 74,578,043 | 0 | 1 | null | 2016-12-24T12:58:50 | 2016-11-23T13:11:30 | Python | UTF-8 | Python | false | false | 91 | py | """
Created on Mar 2, 2012
@author: Alex Hansen
"""
parse_line = "HSQC/HMQC experiments"
| [
"gbouvignies@gmail.com"
] | gbouvignies@gmail.com |
80acb66240a546aa1aad9df8bf32d4cf1bce398f | d5c1d1b162de12942989cb15f5a1e9e9ecf52c82 | /soladm/tests/test_autocomplete.py | 4a7ef3258a43c5de90b443fb60c34f7bd0f76935 | [] | no_license | rr-/soladm | 39730352265e41e558134fe4928ce7c9fe2c50b7 | 67f3e388144d258b861728d81664c78cd6ba2e97 | refs/heads/master | 2021-01-20T07:35:12.826423 | 2017-06-29T11:43:39 | 2017-06-29T11:43:39 | 90,016,113 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 844 | py | from typing import Tuple, Iterable
import pytest
from soladm.ui import autocomplete
@pytest.mark.parametrize('edit_text,edit_pos,affixes', [
('', 0, [('', '', '')]),
('abc', 0, [('', '', 'abc')]),
('abc def', 0, [('', '', 'abc def')]),
('abc', 2, [('', 'ab', 'c')]),
('ab ', 2, [('', 'ab', ' ')]),
(' c', 2, [('', ' ', 'c')]),
(' bc', 2, [('', ' b', 'c'), (' ', 'b', 'c')]),
('a c', 2, [('', 'a ', 'c')]),
('abc def', 5, [
('', 'abc d', 'ef'),
('abc ', 'd', 'ef'),
]),
('a c def', 5, [
('', 'a c d', 'ef'),
('a ', 'c d', 'ef'),
('a c ', 'd', 'ef'),
]),
])
def test_get_affixes(
edit_text: str,
edit_pos: int,
affixes: Iterable[Tuple[str, str, str]]) -> None:
assert autocomplete.get_affixes(edit_text, edit_pos) == affixes
| [
"rr-@sakuya.pl"
] | rr-@sakuya.pl |
af1ccc3bb73c89919f00a287764d22296201e510 | 83e0a7bae272748dadea3330514039b8658ca426 | /test/util/test_timecoord.py | 05d24029bb6500cb460cb40cacd926c57a973180 | [
"MIT"
] | permissive | achtsnits/xcube | 78d081613d71b7e13cc317fb07c297d98e6267ad | 6bc7bda849a1f2cc8cb2bba1152e0a98d4a97aa5 | refs/heads/master | 2020-08-27T20:46:43.230537 | 2019-10-16T14:24:26 | 2019-10-16T14:24:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,263 | py | import unittest
import numpy as np
from test.sampledata import create_highroc_dataset
from xcube.util.timecoord import add_time_coords, to_time_in_days_since_1970, timestamp_to_iso_string, \
from_time_in_days_since_1970
class AddTimeCoordsTest(unittest.TestCase):
def test_add_time_coords_point(self):
dataset = create_highroc_dataset()
dataset_with_time = add_time_coords(dataset, (365 * 47 + 20, 365 * 47 + 20))
self.assertIsNot(dataset_with_time, dataset)
self.assertIn('time', dataset_with_time)
self.assertEqual(dataset_with_time.time.shape, (1,))
self.assertNotIn('time_bnds', dataset_with_time)
def test_add_time_coords_range(self):
dataset = create_highroc_dataset()
dataset_with_time = add_time_coords(dataset, (365 * 47 + 20, 365 * 47 + 21))
self.assertIsNot(dataset_with_time, dataset)
self.assertIn('time', dataset_with_time)
self.assertEqual(dataset_with_time.time.shape, (1,))
self.assertIn('time_bnds', dataset_with_time)
self.assertEqual(dataset_with_time.time_bnds.shape, (1, 2))
def test_to_time_in_days_since_1970(self):
self.assertEqual(17324.5,
to_time_in_days_since_1970('201706071200'))
self.assertEqual(17325.5,
to_time_in_days_since_1970('201706081200'))
self.assertEqual(17690.5,
to_time_in_days_since_1970('2018-06-08 12:00'))
self.assertEqual(17690.5,
to_time_in_days_since_1970('2018-06-08T12:00'))
def test_from_time_in_days_since_1970(self):
self.assertEqual('2017-06-07T12:00:00.000000000',
str(from_time_in_days_since_1970(to_time_in_days_since_1970('201706071200'))))
self.assertEqual('2017-06-08T12:00:00.000000000',
str(from_time_in_days_since_1970(to_time_in_days_since_1970('201706081200'))))
self.assertEqual('2018-06-08T12:00:00.000000000',
str(from_time_in_days_since_1970(to_time_in_days_since_1970('2018-06-08 12:00'))))
self.assertEqual('2018-06-08T12:00:00.000000000',
str(from_time_in_days_since_1970(to_time_in_days_since_1970('2018-06-08T12:00'))))
class TimestampToIsoStringTest(unittest.TestCase):
def test_it_with_default_res(self):
self.assertEqual("2018-09-05T00:00:00Z",
timestamp_to_iso_string(np.datetime64("2018-09-05")))
self.assertEqual("2018-09-05T10:35:42Z",
timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42")))
self.assertEqual("2018-09-05T10:35:42Z",
timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42.164")))
def test_it_with_h_res(self):
self.assertEqual("2018-09-05T00:00:00Z",
timestamp_to_iso_string(np.datetime64("2018-09-05"), freq="H"))
self.assertEqual("2018-09-05T11:00:00Z",
timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42"), freq="H"))
self.assertEqual("2018-09-05T11:00:00Z",
timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42.164"), freq="H"))
| [
"norman.fomferra@gmail.com"
] | norman.fomferra@gmail.com |
b61b29c8c886f33bae6262e56b24e35710ff4c04 | 82319ec6aaf462f6823f43946a7f4a0624bffa20 | /Mariana/training/future/recorders.py | 33b74e2da4e68c3932c3fd599bf04a417baaee53 | [
"Apache-2.0"
] | permissive | enterstudio/Mariana | b76a382f5873f9bf83837e9f5190ab6684e14972 | 6b186d93c5fe5521603a389e975595e45e1ea5d2 | refs/heads/master | 2021-04-29T04:30:21.627507 | 2017-11-21T16:30:55 | 2017-11-21T16:30:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,305 | py | import sys, os, types
from pyGeno.tools.parsers.CSVTools import CSVFile
__all__ = ["Recorder_ABC", "GGPlot2", "SavingRule_ABC", "SaveMin", "SaveMax", "Scores"]
class Scores(object) :
"""Manage and store the scores returned by the trainer. This one is meant to be used internally by trainers."""
def __init__(self) :
self.reset()
def reset(self):
"""resets the store as if nothing ever happened"""
self.currentScores = {}
self.minScores = {}
self.maxScores = {}
self.epoch = 0
def update(self, trainerScores, epoch) :
"""update the with new scores"""
def _rec(minScores, maxScores, dct, epoch, keys = []) :
for k, v in dct.iteritems() :
if type(v) is types.DictType :
keys.append(k)
if k not in minScores :
minScores[k] = {}
maxScores[k] = {}
_rec(minScores[k], maxScores[k], v, epoch, keys = keys)
else :
try :
if v < minScores[k][0] :
minScores[k] = (v, epoch)
elif v > maxScores[k][0] :
maxScores[k] = (v, epoch)
except KeyError :
minScores[k] = (v, epoch)
maxScores[k] = (v, epoch)
self.epoch = epoch
self.currentScores = trainerScores
_rec(self.minScores, self.maxScores, self.currentScores, epoch)
def getScore(self, mapName, outputName, functionName) :
"""return the last score for a map defined in the trainer (a set) an outpur layer and function of that output"""
return (self.currentScores[mapName][outputName][functionName], self.epoch)
def getMinScore(self, mapName, outputName, functionName) :
"""return the min score acheived for a map defined in the trainer (a set) an outpur layer and function of that output"""
return self.minScores[mapName][outputName][functionName]
def getMaxScore(self, mapName, outputName, functionName) :
"""return the max score acheived for a map defined in the trainer (a set) an outpur layer and function of that output"""
return self.maxScores[mapName][outputName][functionName]
class SavingRule_ABC(object):
"""Abstraction for saving rules"""
def __init__(self, *args, **kwargs) :
pass
def _shouldISave(self, recorder) :
"""This is the function that is called by the recorder. If sets self.recorder to 'recorder' and then calls self.shouldISave()"""
self.recorder = recorder
return self.shouldISave(recorder)
def shouldISave(self, recorder) :
"""The function that defines when a save should be performed"""
raise NotImplemented("Should be implemented in child")
def getFilename(self, recorder) :
"""return the filename of the file to be saved"""
raise NotImplemented("Should be implemented in child")
def loadLast(self):
"""load the last saved model"""
import Mariana.network as MNET
return MNET.loadModel(self.getFilename(self.recorder))
def __repr__(self) :
return "%s on %s" %(self.__class__.__name__, (self.mapName, self.outputName, self.functionName) )
class SaveMin(SavingRule_ABC) :
"""Save the model when a new min value is reached
:param string mapName: The set name of a map defined in the trainer usually something like "test" or "validation"
:param string outputName: The name of the output layer to consider (you can also give the layer object)
:param string functionName: The function of the output layer to consider usually "test".
"""
def __init__(self, mapName, outputName, functionName) :
self.mapName = mapName
if type(outputName) is types.StringType :
self.outputName = outputName
else :
self.outputName = outputName.name
self.functionName = functionName
self.recorder = None
def shouldISave(self, recorder) :
s = recorder.scores.getScore(self.mapName, self.outputName, self.functionName)
m = recorder.scores.getMinScore(self.mapName, self.outputName, self.functionName)
if s[0] == m[0] :
return True
return False
def getFilename(self, recorder) :
return "bestMin-%s-%s-%s-%s" % (self.mapName, self.outputName, self.functionName, recorder.filename)
class SaveMax(SavingRule_ABC) :
"""Save the model when a new max value is reached
:param string mapName: The set name of a map defined in the trainer usually something like "test" or "validation"
:param string outputName: The name of the output layer to consider (you can also give the layer object)
:param string functionName: The function of the output layer to consider usually "test".
"""
def __init__(self, mapName, outputName, functionName) :
self.mapName = mapName
if type(outputName) is types.StringType :
self.outputName = outputName
else :
self.outputName = outputName.name
self.functionName = functionName
self.recorder = None
def shouldISave(self, recorder) :
s = recorder.scores.getScore(self.mapName, self.outputName, self.functionName)
m = recorder.scores.getMaxScore(self.mapName, self.outputName, self.functionName)
if s[0] == m[0] :
return True
return False
def getFilename(self, recorder) :
return "bestMax-%s-%s-%s-%s" % (self.mapName, self.outputName, self.functionName, recorder.filename)
class SavePeriod(SavingRule_ABC) :
"""Periodically saves the current model
:param boolean distinct: If False, each new save will overwrite the previous one.
"""
def __init__(self, period, distinct) :
SavingRule_ABC(self)
self.distinct = distinct
self.period = period
def shouldISave(self, recorder) :
return (recorder.epoch % self.period) == 0
def getFilename(self, recorder) :
if self.distinct :
return "periodicallySaved-epoch_%s-%s" % (recorder.epoch, recorder.filename)
else :
return "periodicallySaved-%s" % (recorder.filename)
class Recorder_ABC(object) :
"""A recorder is meant to be plugged into a trainer to record the
advancement of the training. This is the interface a Recorder must expose."""
def commit(self, store, model) :
"""Does something with the currenty state of the trainer's store and the model"""
raise NotImplemented("Should be implemented in child")
def __len__(self) :
"""returns the number of commits performed"""
raise NotImplemented("Should be implemented in child")
class GGPlot(Recorder_ABC):
"""This training recorder will create a nice TSV (tab delimited) file fit for using with ggplot2 and will update
it as the training goes. It will also save the best model for each set of the trainer, and print
regular reports on the console.
:param string filename: The filename of the tsv to be generated. the extension '.ggplot2.tsv' will be added automatically
:param list whenToSave: List of saving rules.
:param int printRate: The rate at which the status is printed on the console. If set to <= to 0, will never print.
:param int write: The rate at which the status is written on disk
"""
def __init__(self, filename, whenToSave = [], printRate=1, writeRate=1):
self.filename = filename.replace(".tsv", "") + ".ggplot.tsv"
self.scores = Scores()
self.csvLegend = None
self.csvFile = None
self.length = 0
self.epoch = 0
self.trainerStore = None
self.printRate = printRate
self.writeRate = writeRate
self.whenToSave = whenToSave
def commit(self, store, model) :
"""Appends the current state of the store to the CSV. This one is meant to be called by the trainer"""
def _fillLine(csvFile, score, minScore, maxScore, mapName, setLen, outputName, outputFunction, **csvValues) :
line = csvFile.newLine()
for k, v in csvValues.iteritems() :
line[k] = v
line["score"] = score[0]
line["min_score"] = minScore[0]
line["min_score_commit"] = minScore[1]
line["max_score"] = maxScore[0]
line["max_score_commit"] = maxScore[1]
line["set"] = "%s" %(mapName)
line["set_size"] = "%s" %(setLen)
line["output_layer"] = outputName
line["output_function"] = outputFunction
line.commit()
self.length += 1
if self.csvLegend is None :
self.csvLegend = store["hyperParameters"].keys()
self.csvLegend.extend(store["runInfos"].keys())
self.csvLegend.extend( ["score", "min_score", "min_score_commit", "max_score", "max_score_commit", "set", "set_size", "output_layer", "output_function"] )
self.csvFile = CSVFile(legend = self.csvLegend, separator = "\t")
self.csvFile.streamToFile( self.filename, writeRate = self.writeRate )
muchData = store["hyperParameters"]
muchData.update(store["runInfos"])
self.scores.update(store["scores"], store["runInfos"]["epoch"])
for mapName, os in store["scores"].iteritems() :
for outputName, fs in os.iteritems() :
for functionName in fs :
_fillLine(
self.csvFile,
self.scores.getScore(mapName, outputName, functionName),
self.scores.getMinScore(mapName, outputName, functionName),
self.scores.getMaxScore(mapName, outputName, functionName),
mapName,
store["setSizes"][mapName],
outputName,
functionName,
**muchData
)
self.trainerStore = store
self.epoch = store["runInfos"]["epoch"]
if self.printRate > 0 and (self.length%self.printRate) == 0:
self.printCurrentState()
for w in self.whenToSave :
if w._shouldISave(self) :
model.save(w.getFilename(self))
def printCurrentState(self) :
"""prints the current state stored in the recorder"""
if self.length > 0 :
print "\n==>rec: ggplot2, epoch %s, commit %s, pid: %s:" % (self.epoch, self.length, os.getpid())
for mapName, outs in self.scores.currentScores.iteritems() :
print " |-%s set" % mapName
for outputName, fs in outs.iteritems() :
print " |-%s" % outputName
for functionName in fs :
s = self.scores.getScore(mapName, outputName, functionName)
mi = self.scores.getMinScore(mapName, outputName, functionName)
ma = self.scores.getMaxScore(mapName, outputName, functionName)
highlight = []
if s[0] == mi[0] :
highlight.append("+min+")
else :
highlight.append("%s@%s" % (mi[0], mi[1]))
if s[0] == ma[0] :
highlight.append("+max+")
else :
highlight.append("%s@%s" % (ma[0], ma[1]))
print " |-%s: %s [%s]" % (functionName, s[0], "; ".join(highlight))
else :
print "==>rec: ggplot2, nothing to show yet"
sys.stdout.flush()
def __repr__(self):
return "<recorder: %s, filename: %s>" % (self.__class__.__name__, self.filename)
def __len__(self) :
"""returns the number of commits performed"""
return self.length
| [
"tariq.daouda@umontreal.ca"
] | tariq.daouda@umontreal.ca |
e55764e47834c1865fe67bbb512f3243934e79f4 | 256f817910dd698970fab89871c6ce66a3c416e7 | /1. solvedProblems/30. Substring with Concatenation of All Words/30.py | ce5a51cd4adedf962473dba896bff6a6a7f0783b | [] | no_license | tgaochn/leetcode | 5926c71c1555d2659f7db4eff9e8cb9054ea9b60 | 29f1bd681ae823ec6fe755c8f91bfe1ca80b6367 | refs/heads/master | 2023-02-25T16:12:42.724889 | 2021-02-04T21:05:34 | 2021-02-04T21:05:34 | 319,225,860 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,228 | py | # !/usr/bin/env python
# coding: utf-8
"""
Author:
Tian Gao (tgaochn@gmail.com)
CreationDate:
Sat, 11/28/2020, 19:54
# !! Description:
"""
import sys
from typing import List
sys.path.append('..')
from utils import binaryTree, nTree, singleLinkedList
from utils.utils import (
printMatrix,
printDict,
printList,
isMatrix,
)
ListNode = singleLinkedList.ListNode
TreeNode = binaryTree.TreeNode
Node = nTree.Node
null = None
testCaseCnt = 6
# maxFuncInputParaCnt = 8
# !! step1: replace these two lines with the given code
class Solution:
def findSubstring(self, s: str, words: List[str]) -> List[int]:
l, r = 0, 0
n = len(s)
wordLen = len(words[0])
wordCnt = len(words)
winLen = wordCnt * wordLen
self.wordFreqHash = {}
for word in words:
self.wordFreqHash.setdefault(word, 0)
self.wordFreqHash[word] += 1
from collections import deque
win = deque()
rlt = []
def isValidRlt(win, words, wordCnt, wordLen):
winWordFreqHash = {}
winString = ''.join(list(win))
for i in range(wordCnt):
curWinWord = winString[i * wordLen: i * wordLen + wordLen]
if curWinWord not in self.wordFreqHash: return False
winWordFreqHash.setdefault(curWinWord, 0)
winWordFreqHash[curWinWord] += 1
for word, freq in self.wordFreqHash.items():
winWordFreq = winWordFreqHash.get(word, 0)
if freq != winWordFreq: return False
return True
while r < n:
eleR = s[r]
win.append(eleR)
r += 1
# !! 长度不够
if r - l < winLen:
continue
if isValidRlt(win, words, wordCnt, wordLen):
# print(''.join(win))
rlt.append(l)
win.popleft()
l += 1
return rlt
# endFunc
# endClass
def func():
# !! step2: change function name
s = Solution()
myFuncLis = [
s.findSubstring,
# optional: add another function for comparison
]
onlyDisplayError = True
enableInput = [True] * testCaseCnt
input = [None] * testCaseCnt
expectedRlt = [None] * testCaseCnt
# enableInput[0] = False
# enableInput[1] = False
# enableInput[2] = False
# enableInput[3] = False
# enableInput[4] = False
# enableInput[5] = False
# !! step3: change input para, input para can be found in "run code" - "test case"
# ! para1
input[0] = (
"barfoothefoobarman",
["foo", "bar"],
# binaryTree.buildTree(None)
# singleLinkedList.buildSingleList(None)
# nTree.buildTree(None)
)
expectedRlt[0] = [0, 9]
# ! para2
input[1] = (
"wordgoodgoodgoodbestword",
["word", "good", "best", "word"],
# binaryTree.buildTree(None),
# singleLinkedList.buildSingleList(None),
# nTree.buildTree(None),
)
expectedRlt[1] = []
# ! para3
input[2] = (
"barfoofoobarthefoobarman",
["bar", "foo", "the"],
# singleLinkedList.buildSingleList(None),
# binaryTree.buildTree(None),
# nTree.buildTree(None),
)
expectedRlt[2] = [6, 9, 12]
# ! para4
input[3] = (
None
# singleLinkedList.buildSingleList(None),
# binaryTree.buildTree(None),
# nTree.buildTree(None),
)
expectedRlt[3] = None
# ! para5
input[4] = (
None
# singleLinkedList.buildSingleList(None),
# binaryTree.buildTree(None),
# nTree.buildTree(None),
)
expectedRlt[4] = None
# ! para6
input[5] = (
None
# singleLinkedList.buildSingleList(None),
# binaryTree.buildTree(None),
# nTree.buildTree(None),
)
expectedRlt[5] = None
# !! ====================================
# function and parameters count
allInput = [(input[i], enableInput[i], expectedRlt[i]) for i in range(testCaseCnt)]
if not input[0]:
print("ERROR: please assign at least one input for input[0]!")
exit()
funcParaCnt = 1 if not isinstance(input[0], tuple) else len(input[0])
funcCnt = len(myFuncLis)
# for each test case
for inputPara, enableInput, expectedRlt in allInput:
if not enableInput or not inputPara: continue
inputParaList = [None] * funcParaCnt
if not isinstance(inputPara, tuple):
inputPara = [inputPara]
for j in range(funcParaCnt):
inputParaList[j] = inputPara[j]
# for each function
for j in range(funcCnt):
print('==' * 20)
myFunc = myFuncLis[j]
# ! manually call function, max para count: 8
rlt = None
if funcParaCnt == 1:
rlt = myFunc(inputPara[0])
if funcParaCnt == 2:
rlt = myFunc(inputPara[0], inputPara[1])
if funcParaCnt == 3:
rlt = myFunc(inputPara[0], inputPara[1], inputPara[2])
if funcParaCnt == 4:
rlt = myFunc(inputPara[0], inputPara[1], inputPara[2], inputPara[3])
if funcParaCnt == 5:
rlt = myFunc(inputPara[0], inputPara[1], inputPara[2], inputPara[3], inputPara[4])
if funcParaCnt == 6:
rlt = myFunc(inputPara[0], inputPara[1], inputPara[2], inputPara[3], inputPara[4], inputPara[5])
if funcParaCnt == 7:
rlt = myFunc(inputPara[0], inputPara[1], inputPara[2], inputPara[3], inputPara[4], inputPara[5], inputPara[6])
if funcParaCnt == 8:
rlt = myFunc(inputPara[0], inputPara[1], inputPara[2], inputPara[3], inputPara[4], inputPara[5], inputPara[6], inputPara[7])
# only output when the result is not expected
if onlyDisplayError and expectedRlt is not None and expectedRlt == rlt: continue
# output function name
if funcCnt > 1:
print('func: \t%s' % myFunc.__name__)
# output para
for k in range(funcParaCnt):
para = inputParaList[k]
formatPrint('input %s:' % (k + 1), para)
# output result
print()
if not rlt:
print('rlt:\t', rlt)
else:
formatPrint('rlt:', rlt)
if expectedRlt is not None:
if not expectedRlt:
print('expRlt:\t', expectedRlt)
else:
formatPrint('expRlt:', expectedRlt)
print('==' * 20)
# endFunc
def isSpecialInstance(myInstance):
for curType in [TreeNode, Node]:
if isinstance(myInstance, curType):
return True
return False
# endFunc
def formatPrint(prefix, data):
if isMatrix(data):
print('%s' % prefix)
printMatrix(data)
else:
splitter = '\n' if isSpecialInstance(data) else '\t'
print('%s%s%s' % (prefix, splitter, data))
# endFunc
def main():
func()
# endMain
if __name__ == "__main__":
main()
# endIf
| [
"tgaochn@gmail.com"
] | tgaochn@gmail.com |
1f8bce0c11115e32a2762fd95571725acd8d8627 | a3d058c6a80d4068fa4d3185ddd2dec91abc82d7 | /190103_카펫.py | bf7c0ea85c0440df0a025dc48826d72f324705fd | [] | no_license | guard1000/Everyday-coding | d6f496654b635738a4284270f6c5d285116a760e | 7755f99cdb512d623392af82282bf17b47cb77f2 | refs/heads/master | 2021-08-18T22:26:04.322162 | 2021-07-21T14:53:28 | 2021-07-21T14:53:28 | 161,440,626 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | def solution(brown, red):
answer = []
xlist = []
i = 0
for x in range(1, red + 1):
if red % x == 0 and x not in xlist:
xlist.append(red / x)
for x2 in xlist:
n = 1
while 2 * x2 + 2 * (red / x2) - 4 + 8 * n <= brown:
if 2 * x2 + 2 * (red / x2) - 4 + 8 * n == brown:
i = x2 + n * 2
break
n = n + 1
if i != 0:
break
answer.append(i)
answer.append((brown + red) / answer[0])
return answer | [
"cjsdnr885@naver.com"
] | cjsdnr885@naver.com |
76b3b454adef458d8f84bb8c711f378e962c4afd | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_perfectest.py | 617a0575b5947bdc969f79d6b1bbe1909e2fe462 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py |
from xai.brain.wordbase.nouns._perfect import _PERFECT
#calss header
class _PERFECTEST(_PERFECT, ):
def __init__(self,):
_PERFECT.__init__(self)
self.name = "PERFECTEST"
self.specie = 'nouns'
self.basic = "perfect"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
0d0d265bebb6878ee92a14872f136d0cfbb06604 | f7509541ade3c2ed21ea374013905c3e475f46a4 | /src/main.py | cc75b28d10a4d0ea68fb34ccc954460ddd08bcfa | [
"CC0-1.0"
] | permissive | ytyaru/Python.Pixpeer.SelectedListItem.Key.20200513163428 | ff3ce58ead3d2245d3b6bd108ecd8a911a36ac80 | a59f16fd35d7bdc125e456e9ce1ac891f7ac22a2 | refs/heads/master | 2022-06-18T06:33:47.775400 | 2020-05-13T08:12:18 | 2020-05-13T08:12:18 | 263,566,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,575 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys, os, numpy, PIL
from PySide2 import QtCore, QtGui, QtWidgets
from PIL import Image, ImagePalette, ImageQt, ImageSequence
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(self.__class__, self).__init__()
self.setAcceptDrops(True)
self.setWindowTitle("pixpeer")
self.widget = Widget(self)
self.setCentralWidget(self.widget)
menu_file = QtWidgets.QMenu('File', self)
menu_file.addAction(self.widget.GraphicsView.Scene.Drawable.SaveAction)
self.menuBar().addMenu(menu_file)
menu_frame = QtWidgets.QMenu('Animation', self)
menu_frame.addAction(FrameListView.AddFrameAction)
menu_frame.addAction(FrameListView.InsertCopyFrameAction)
menu_frame.addAction(FrameListView.DeleteFrameAction)
menu_frame.addAction(FrameListView.SelectedPreviousFrameAction)
menu_frame.addAction(FrameListView.SelectedNextFrameAction)
self.menuBar().addMenu(menu_frame)
self.show()
# Frame側でも使いたいので
globals()['Window'] = self
def mousePressEvent(self, event):
super(self.__class__, self).mousePressEvent(event)
self.widget.update()
def mouseMoveEvent(self, event):
super(self.__class__, self).mouseMoveEvent(event)
self.widget.update()
def mouseReleaseEvent(self, event):
super(self.__class__, self).mouseReleaseEvent(event)
self.widget.update()
def dragEnterEvent(self, event):
super(self.__class__, self).dragEnterEvent(event)
self.widget.update()
def dragMoveEvent(self, event):
super(self.__class__, self).dragMoveEvent(event)
self.widget.update()
def dropEvent(self, event):
super(self.__class__, self).dropEvent(event)
self.widget.update()
class Widget(QtWidgets.QWidget):
    """Central widget of the main window.

    Stacks the drawing canvas (a GraphicView inside a scroll area) above
    the animation panel (preview label + frame strip). Every input event
    is forwarded to the default handler and then the scene/view are
    explicitly repainted; the commented-out ``self.animation...`` lines
    are an abandoned attempt to also forward events to the panel.
    """
    def __init__(self, parent):
        super(self.__class__, self).__init__(parent)
        self.setAcceptDrops(True)
        self.view = GraphicView()
        self.animation = AnimationWidget()
        # self.animation.setMinimumHeight(self.animation.height())
        # self.animation.setMaximumHeight(self.animation.height()*1.2)
        self.animation.setMinimumHeight(32)
        self.animation.setMaximumHeight(64)
        self.animation.resize(self.animation.width(), self.animation.height())
        # NOTE: rebinds the module-level name `AnimationWidget` (the class)
        # to this instance so other classes can reach it as a global.
        globals()['AnimationWidget'] = self.animation
        scroller1 = QtWidgets.QScrollArea()
        scroller1.setWidget(self.view)
        layout = QtWidgets.QGridLayout()
        layout.addWidget(scroller1, 0, 0)
        layout.addWidget(self.animation, 1, 0)
        self.setLayout(layout)
        self.resize(self.view.width(), self.view.height())
        self.show()
    @property
    def GraphicsView(self): return self.view
    def mousePressEvent(self, event):
        super(self.__class__, self).mousePressEvent(event)
        self.view.scene().update()
        self.view.update()
        # self.animation.mousePressEvent(event)
        # self.animation.update()
    def mouseMoveEvent(self, event):
        super(self.__class__, self).mouseMoveEvent(event)
        self.view.scene().update()
        self.view.update()
        # self.animation.mouseMoveEvent(event)
        # self.animation.update()
    def mouseReleaseEvent(self, event):
        super(self.__class__, self).mouseReleaseEvent(event)
        self.view.scene().update()
        self.view.update()
        # self.animation.mouseReleaseEvent(event)
        # self.animation.update()
    def dragEnterEvent(self, event):
        super(self.__class__, self).dragEnterEvent(event)
        # Also hands the event to the view directly (on top of normal Qt
        # propagation) so the scene sees file drags.
        self.view.dragEnterEvent(event)
        self.view.scene().update()
        self.view.update()
        # self.animation.dragEnterEvent(event)
        # self.animation.update()
    def dragMoveEvent(self, event):
        super(self.__class__, self).dragMoveEvent(event)
        self.view.scene().update()
        self.view.update()
        # self.animation.dragMoveEvent(event)
        # self.animation.update()
    def dropEvent(self, event):
        super(self.__class__, self).dropEvent(event)
        self.view.scene().update()
        self.view.update()
        # Dropped files are also delivered to the animation panel.
        self.animation.dropEvent(event)
        # self.animation.update()
class GraphicView(QtWidgets.QGraphicsView):
    """Graphics view hosting the pixel-editor scene.

    Every mouse/drag handler delegates to the default implementation and
    then forces a scene repaint.
    """
    def __init__(self):
        QtWidgets.QGraphicsView.__init__(self)
        self.setAcceptDrops(True)
        # self.setWindowTitle("QGraphicsScene draw Grid")
        self.__editorScene = EditorScene(self)
        self.setScene(self.__editorScene)
    def mousePressEvent(self, event):
        super(self.__class__, self).mousePressEvent(event)
        self.scene().update()
    def mouseMoveEvent(self, event):
        super(self.__class__, self).mouseMoveEvent(event)
        self.scene().update()
    def mouseReleaseEvent(self, event):
        super(self.__class__, self).mouseReleaseEvent(event)
        self.scene().update()
    @property
    def Scene(self): return self.__editorScene
    def dragEnterEvent(self, event):
        super(self.__class__, self).dragEnterEvent(event)
        self.scene().update()
    def dragMoveEvent(self, event):
        # BUG FIX: this override was previously also named dragEnterEvent,
        # which shadowed the handler above (only the last definition of a
        # name survives in a class body) and left drag-move unhandled while
        # calling super().dragMoveEvent from a dragEnter override.
        super(self.__class__, self).dragMoveEvent(event)
        self.scene().update()
    def dropEvent(self, event):
        super(self.__class__, self).dropEvent(event)
        self.scene().update()
class EditorScene(QtWidgets.QGraphicsScene):
    """Scene composed of three stacked items: checkerboard background (z=0),
    the drawable pixmap layer (z=1) and the grid overlay (z=9999).

    Input events are fanned out manually to every item before default
    scene handling runs.
    """
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)
        self.size = 16    # canvas size in cells
        self.scale = 32   # pixels per cell on screen
        self.setSceneRect(0, 0, self.size*self.scale, self.size*self.scale)
        self.grid = GridItem()
        self.addItem(self.grid)
        self.background = BackgroundItem()
        self.addItem(self.background)
        self.drawable = DrawableItem()
        self.addItem(self.drawable)
        self.background.setZValue(0)
        self.drawable.setZValue(1)
        self.grid.setZValue(9999)
        # Published as a module-level global because the Frame side needs it too.
        globals()['Drawable'] = self.drawable
    def mousePressEvent(self, event):
        for item in self.items():
            item.mousePressEvent(event)
        super(self.__class__, self).mousePressEvent(event)
    def mouseMoveEvent(self, event):
        for item in self.items():
            item.setAcceptHoverEvents(True)
            item.mouseMoveEvent(event)
        super(self.__class__, self).mouseMoveEvent(event)
    def mouseReleaseEvent(self, event):
        for item in self.items():
            item.mouseReleaseEvent(event)
        # BUG FIX: previously delegated to super().mousePressEvent, which
        # re-dispatched a *press* on release.
        super(self.__class__, self).mouseReleaseEvent(event)
    def dragEnterEvent(self, event):
        for item in self.items():
            item.setAcceptDrops(True)
            # NOTE(review): `event is type(...)` is always False (identity
            # test between an instance and a class); isinstance() was
            # probably intended. Kept as-is to avoid double event delivery.
            if event is type(QtWidgets.QGraphicsSceneDragDropEvent):
                item.dragEnterEvent(event)
        if event is type(QtWidgets.QGraphicsSceneDragDropEvent):
            super(self.__class__, self).dragEnterEvent(event)
    def dragMoveEvent(self, event):
        for item in self.items():
            item.setAcceptDrops(True)
            # BUG FIX: this handler previously forwarded dragEnterEvent
            # instead of dragMoveEvent (copy-paste from the method above).
            # The guard is still always False; see NOTE in dragEnterEvent.
            if event is type(QtWidgets.QGraphicsSceneDragDropEvent):
                item.dragMoveEvent(event)
        if event is type(QtWidgets.QGraphicsSceneDragDropEvent):
            super(self.__class__, self).dragMoveEvent(event)
    def dropEvent(self, event):
        for item in self.items():
            item.setAcceptDrops(True)
            item.dropEvent(event)
        if event is type(QtWidgets.QGraphicsSceneDragDropEvent):
            super(self.__class__, self).dropEvent(event)
    @property
    def Grid(self): return self.grid
    @property
    def Background(self): return self.background
    @property
    def Drawable(self): return self.drawable
class DrawableItem(QtWidgets.QGraphicsRectItem):
    """The editable pixmap layer of the canvas.

    Left-drag paints red, right-drag paints fully-transparent (erase) via a
    FreeHand stroke. After every stroke step the currently selected frames
    in the global FrameListView are refreshed with the new pixmap. Relies
    on the module-level globals published elsewhere (`FrameListView`).
    """
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)
        self.setAcceptDrops(True)
        self.setAcceptHoverEvents(True)
        self.scale = 32                 # on-screen pixels per canvas cell
        self.pixels = Pixels()          # logical pixel grid + save/load
        self.actions = {}
        self.__create_save_action()
        # Start from a fully transparent ARGB pixmap.
        img = QtGui.QImage(self.pixels.Width, self.pixels.Height, QtGui.QImage.Format_ARGB32)
        img.fill(QtGui.QColor(0,0,0,0))
        # self.pixmap = QtGui.QPixmap(self.pixels.Width, self.pixels.Height)
        self.pixmap = QtGui.QPixmap.fromImage(img)
        print('Alpha:', self.pixmap.hasAlpha())
        self.__draw_pos = []            # NOTE(review): written here, never read
        self.freehand = FreeHand()
    def __create_save_action(self):
        # Ctrl+S saves the current frame/animation in all supported formats.
        a = QtWidgets.QAction('Save')
        a.setObjectName('Save')
        a.setShortcut('Ctrl+S')
        a.triggered.connect(self.Pixels.save)
        self.actions['Save'] = a
    def paint(self, painter, option, widget):
        # Scale the 16x16 pixmap up to cell size when rendering.
        painter.drawPixmap(0, 0, self.pixels.Width*self.scale, self.pixels.Height*self.scale, self.pixmap)
    def mouseMoveEvent(self, event):
        # Map the scene position to a clamped cell coordinate.
        pos = event.scenePos()
        x = int(pos.x()//self.scale)
        y = int(pos.y()//self.scale)
        x = max(0, x)
        y = max(0, y)
        x = min(x, self.pixels.Width-1)
        y = min(y, self.pixels.Height-1)
        if event.buttons() & QtCore.Qt.LeftButton:
            self.freehand.Color = QtGui.QColor(255,0,0)
            self.freehand.draw(self.pixmap, x, y)
            self.__update_frame_list(event)
        if event.buttons() & QtCore.Qt.RightButton:
            # Transparent color + Source composition erases.
            self.freehand.Color = QtGui.QColor(0,0,0,0)
            self.freehand.draw(self.pixmap, x, y)
            self.__update_frame_list(event)
    def mousePressEvent(self, event):
        # Same cell mapping and paint/erase dispatch as mouseMoveEvent.
        pos = event.scenePos()
        x = int(pos.x()//self.scale)
        y = int(pos.y()//self.scale)
        x = max(0, x)
        y = max(0, y)
        x = min(x, self.pixels.Width-1)
        y = min(y, self.pixels.Height-1)
        if event.buttons() & QtCore.Qt.LeftButton:
            self.freehand.Color = QtGui.QColor(255,0,0)
            self.freehand.draw(self.pixmap, x, y)
            self.__update_frame_list(event)
        if event.buttons() & QtCore.Qt.RightButton:
            self.freehand.Color = QtGui.QColor(0,0,0,0)
            self.freehand.draw(self.pixmap, x, y)
            self.__update_frame_list(event)
    def mouseReleaseEvent(self, event):
        pos = event.scenePos()
        x = int(pos.x()//self.scale)
        y = int(pos.y()//self.scale)
        x = max(0, x)
        y = max(0, y)
        x = min(x, self.pixels.Width-1)
        y = min(y, self.pixels.Height-1)
        # Finish the stroke and reset it for the next drag.
        self.freehand.draw(self.pixmap, x, y)
        self.freehand.points.clear()
        self.__update_frame_list(event)
    def __update_frame_list(self, event):
        for idx in FrameListView.selectedIndexes():
            # Redraw. For some reason firing a mouse event triggers a repaint.
            # It also selects the first list item as a side effect, so
            # setCurrentIndex must be called again afterwards.
            FrameListView.mouseMoveEvent(QtGui.QMouseEvent(QtCore.QEvent.MouseMove, event.pos(), event.button(), event.buttons(), QtCore.Qt.NoModifier))
            FrameListView.Model.update_pixmap(idx, self.pixmap)
            FrameListView.setCurrentIndex(FrameListView.Model.index(idx.row(),0))
    def mouseDoubleClickEvent(self, event):
        pass
    @property
    def Pixels(self): return self.pixels
    @Pixels.setter
    def Pixels(self, value):
        # Copy cell-by-cell into the existing grid instead of rebinding,
        # so references held elsewhere stay valid.
        for y in range(value.Height):
            for x in range(value.Width):
                self.pixels.Pixels[y][x] = value.Pixels[y][x]
    @property
    def SaveAction(self): return self.actions['Save']
    def dragEnterEvent(self, event):
        # Accept only file drops.
        if event.mimeData().hasUrls():
            event.acceptProposedAction()
    def dropEvent(self, event):
        # Load every dropped file as a new frame / canvas content.
        for url in event.mimeData().urls():
            file_name = url.toLocalFile()
            print("Dropped file: " + file_name)
            self.Pixels.load(file_name)
class FreeHand:
    """Free-hand drawing tool.

    Accumulates the cell coordinates visited during a drag and strokes the
    connecting polyline onto a pixmap in the current color. The caller
    clears ``points`` when the stroke ends.
    """
    def __init__(self, *args, **kwargs):
        self.points = []
        # NOTE(review): default alpha is 0 (fully transparent); callers
        # always set Color before drawing - confirm before relying on it.
        self.color = QtGui.QColor(255,0,0,0)
    @property
    def Color(self): return self.color
    @Color.setter
    def Color(self, value):
        # Silently ignore anything that is not a QColor.
        if isinstance(value, QtGui.QColor):
            self.color = value
    def draw(self, pixmap, x, y):
        """Append (x, y) to the stroke and repaint the whole stroke onto *pixmap*."""
        self.points.append((x,y))
        painter = QtGui.QPainter(pixmap)
        # painter.setBrush(QtGui.QBrush(QtGui.QColor(255,0,0,128), QtCore.Qt.SolidPattern))
        painter.setBrush(self.color)
        painter.setPen(self.color)
        # Source mode overwrites destination pixels outright, so drawing
        # with a transparent color acts as an eraser.
        painter.setCompositionMode(QtGui.QPainter.CompositionMode_Source)
        # painter.fillRect(pixmap.rect(), QtCore.Qt.transparent);
        # painter.setCompositionMode(QtGui.QPainter.CompositionMode_SourceOver);
        if 1 == len(self.points):
            # Single point: draw a zero-length line (one pixel).
            painter.drawLine(self.points[0][0], self.points[0][1], self.points[0][0], self.points[0][1])
        elif 1 < len(self.points):
            # BUG FIX: removed a debug print that ran for every segment on
            # every mouse-move, flooding stdout during drawing.
            for i in range(len(self.points)-1):
                painter.drawLine(self.points[i][0], self.points[i][1], self.points[i+1][0], self.points[i+1][1])
        painter.end()
class BackgroundItem(QtWidgets.QGraphicsRectItem):
    """Checkerboard backdrop that makes transparent canvas pixels visible.

    Each canvas cell is rendered as a 2x2 checker of half-cell squares in
    two shades of gray.
    """
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)
        self.size = 16    # canvas size in cells
        self.scale = 32   # on-screen pixels per cell
        self.colors = [QtGui.QColor(196,196,196,255), QtGui.QColor(232,232,232,255)]
    def paint(self, painter, option, widget):
        # BUG FIX: removed a dead local QColor that was allocated on every
        # iteration but never used.
        for i in range(self.size*self.size):
            x = (i % self.size)
            y = (i // self.size)
            # Dark shade on the main diagonal halves, light on the others.
            painter.fillRect(x * (self.scale), y * (self.scale), self.scale//2, self.scale//2, self.colors[0])
            painter.fillRect(x * (self.scale)+self.scale//2, y * (self.scale)+self.scale//2, self.scale//2, self.scale//2, self.colors[0])
            painter.fillRect(x * (self.scale)+self.scale//2, y * (self.scale), self.scale//2, self.scale//2, self.colors[1])
            painter.fillRect(x * (self.scale), y * (self.scale)+self.scale//2, self.scale//2, self.scale//2, self.colors[1])
class GridItem(QtWidgets.QGraphicsRectItem):
    """Overlay item that paints the cell-grid lines on top of the canvas."""
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)
        self.size = 16    # canvas size in cells
        self.scale = 32   # on-screen pixels per cell
    def paint(self, painter, option, widget):
        # Clear with a fully transparent fill, then stroke the grid.
        painter.fillRect(widget.rect(), QtGui.QBrush(QtGui.QColor(0,0,0,0), QtCore.Qt.SolidPattern))
        edge = self.size * self.scale
        horizontal = [QtCore.QLine(0, i * self.scale, edge, i * self.scale)
                      for i in range(self.size + 1)]
        vertical = [QtCore.QLine(i * self.scale, 0, i * self.scale, edge)
                    for i in range(self.size + 1)]
        painter.drawLines(horizontal + vertical)
class Pixels:
    """Logical pixel grid plus save/load in txt, png, gif and webp.

    Saving writes into the current working directory. Loading appends
    frames to the global FrameListView. Heavily coupled to the
    module-level globals published by the UI classes.
    """
    def __init__(self):
        self.width = 16
        self.height = 16
        # 2-D int grid, row-major: pixels[y][x].
        self.pixels = numpy.zeros(self.width*self.height, dtype=int).reshape(self.height, self.width)
    @property
    def Pixels(self): return self.pixels
    @property
    def Width(self): return self.width
    @property
    def Height(self): return self.height
    def save(self):
        """Save the selected frame (txt + raster) and the whole animation."""
        print(os.getcwd())
        self.save_txt()
        for ext in ('gif', 'png', 'webp'):
            self.save_raster(ext)
        for ext in ('gif', 'png', 'webp'):
            self.save_animation(ext)
    def load(self, file_path):
        """Dispatch loading by file extension; raises on missing/unknown ext."""
        ext = os.path.splitext(file_path)[1].lower()[1:]
        if '' == ext: raise Exception('拡張子が必要です。png,gif,webp,txt形式のいずれかに対応しています。')
        elif 'txt' == ext: self.load_txt(file_path)
        elif 'gif' == ext: self.load_gif(file_path)
        elif 'png' == ext: self.load_png(file_path)
        elif 'webp' == ext: self.load_webp(file_path)
        else: raise Exception('拡張子が未対応です。png,gif,webp,txt形式のいずれかに対応しています。')
    def save_txt(self):
        # One line of 0/1 digits per row, taken from the first selected frame.
        with open(os.path.join(os.getcwd(), 'pixels.txt'), 'w') as f:
            idx = FrameListView.selectedIndexes()
            image = FrameListView.Model.Frames[idx[0].row()].pixmap.toImage()
            pixels = [0 if 0 == image.pixel(x, y) else 1 for y in range(image.height()) for x in range(image.width())]
            pixels = numpy.array(pixels).reshape(image.height(), image.width())
            f.write('\n'.join([''.join(map(str, pixels[y].tolist())) for y in range(image.height())]))
    def load_txt(self, file_path):
        # Parse one hex digit per cell, then append the result as a new
        # red-on-transparent frame.
        with open(file_path, 'r') as f:
            lines = f.read().split('\n')
            self.height = len(lines)
            self.width = len(lines[0])
            self.pixels = numpy.zeros(self.width*self.height, dtype=int).reshape(self.height, self.width)
            x = 0; y = 0;
            for line in lines:
                for c in line:
                    self.pixels[y][x] = int(c, 16)
                    x += 1
                y += 1
                x = 0
            # NOTE(review): `idx` is assigned but never used below.
            idx = FrameListView.selectedIndexes()
            image = Image.new('P', (self.width, self.height))
            pixels = numpy.array(self.pixels).reshape(image.width * image.height)
            image.putdata(pixels.tolist())
            image.putpalette([0,0,0,255,255,255])
            image.putalpha(image.convert('1'))
            # Force palette entry 1 to pure red.
            palette = image.getpalette()
            palette[3] = 255
            palette[4] = 0
            palette[5] = 0
            image.putpalette(palette)
            FrameListView.Model.appendRow(QtGui.QPixmap.fromImage(ImageQt.ImageQt(image.convert('RGBA'))))
    def save_raster(self, ext):
        """Save every selected frame as a single still image 'pixels.<ext>'."""
        for index in FrameListView.selectedIndexes():
            qimg = FrameListView.Model.Frames[index.row()].pixmap.toImage()
            if 'webp' == ext:
                image = Image.new('PA', (self.width, self.height))
                # NOTE(review): width()/height() look swapped in these
                # ranges; harmless only because the canvas is square.
                image.putdata([qimg.pixel(x,y) for y in range(qimg.width()) for x in range(qimg.height())])
                print(image.getpalette())
                palette = image.getpalette()
                palette[0] = 255
                palette[1] = 0
                palette[2] = 0
                palette = palette[:3]
                image.putpalette(palette)
            else:
                image = Image.new('P', (self.width, self.height))
                image.putdata([0 if 0 == qimg.pixel(x,y) else 1 for y in range(qimg.width()) for x in range(qimg.height())])
                palette = image.getpalette()
                palette[3] = 255
                palette[4] = 0
                palette[5] = 0
                palette = palette[:6]
                image.putpalette(palette)
            print(ext)
            p = {}
            p['optimize'] = True
            p['lossless'] = True
            p['transparency'] = 0
            if 'gif' == ext: p['disposal'] = 2
            if 'webp' == ext: p['background'] = (0,0,0,0)
            image.save(os.path.join(os.getcwd(), 'pixels.' + ext), **p)
    def save_animation(self, ext):
        """Save all frames as an animated 'animation.<ext>' (2+ frames only)."""
        print(ext)
        if len(FrameListView.Model.Frames) < 2: return
        images = []
        print('save', len(FrameListView.Model.Frames))
        for frame in FrameListView.Model.Frames:
            mode = 'PA' if 'webp' == ext else 'P'
            image = Image.new(mode, (frame.Pixels.Width, frame.Pixels.Height))
            qimg = frame.pixmap.toImage()
            if 'webp' == ext:
                # NOTE(review): width()/height() swapped here too (square canvas).
                image.putdata([qimg.pixel(x,y) for y in range(qimg.width()) for x in range(qimg.height())])
            else:
                image.putdata([0 if 0 == qimg.pixel(x,y) else 1 for y in range(frame.pixmap.width()) for x in range(frame.pixmap.height())])
            if 'webp' == ext:
                palette = image.getpalette()
                palette[0] = 255
                palette[1] = 0
                palette[2] = 0
                palette = palette[:3]
                image.putpalette(palette)
            else:
                palette = image.getpalette()
                palette[3] = 255
                palette[4] = 0
                palette[5] = 0
                palette = palette[:6]
                image.putpalette(palette)
            images.append(image)
        p = {}
        p['save_all'] = True
        p['append_images'] = images
        p['duration'] = AnimationDurationSetDialog.Duration
        p['loop'] = AnimationDurationSetDialog.Loop
        p['optimize'] = False
        p['transparency'] = 0
        if 'gif' == ext: p['disposal'] = 2
        if 'webp' == ext: p['background'] = (0,0,0,0)
        # NOTE(review): saves from the last loop `image` with *all* frames
        # appended, so the final frame appears twice - confirm intent.
        image.save(os.path.join(os.getcwd(), 'animation.' + ext), **p)
    def load_png(self, file_path):
        # NOTE(review): unlike load_gif/load_webp this has no `with`, so the
        # file handle is left to the garbage collector.
        image = Image.open(file_path, mode='r')
        frames = [frame.copy() for frame in ImageSequence.Iterator(image)]
        if 1 < len(frames): frames = frames[1:] # skip index 0: for some reason it holds the last frame
        for frame in frames:
            img = frame.convert('RGBA') # ValueError: unsupported image mode PA
            FrameListView.Model.appendRow(QtGui.QPixmap.fromImage(ImageQt.ImageQt(img)))
    def load_gif(self, file_path): # values come back as 0/255, so they get normalized to 0/1
        with Image.open(file_path, mode='r') as image:
            frames = [frame.copy() for frame in ImageSequence.Iterator(image)]
            if 1 < len(frames): frames = frames[1:] # skip index 0: for some reason it holds the last frame
            for frame in frames:
                img = frame.convert('RGBA') # ValueError: unsupported image mode PA
                FrameListView.Model.appendRow(QtGui.QPixmap.fromImage(ImageQt.ImageQt(img)))
    def load_webp(self, file_path): # values come back as [0,0,0]/[255,255,255], so they get normalized to 0/1
        with Image.open(file_path, mode='r') as image:
            frames = [frame.copy() for frame in ImageSequence.Iterator(image)]
            if 1 < len(frames): frames = frames[1:] # skip index 0: for some reason it holds the last frame
            for frame in frames:
                print(True if 'default_image' in frame.info else False)
                img = frame.convert('RGBA') # ValueError: unsupported image mode PA
                FrameListView.Model.appendRow(QtGui.QPixmap.fromImage(ImageQt.ImageQt(img)))
class AnimationWidget(QtWidgets.QWidget):
    """Bottom panel: live animation preview next to the frame strip."""

    def __init__(self, parent=None):
        super(self.__class__, self).__init__(parent)
        # The frame list must exist (and be published as a global) before
        # the preview label is built, because the label reads it.
        self.frame_list = FrameListView()
        globals()['FrameListView'] = self.frame_list
        self.label = AnimationLabel()
        globals()['AnimationLabel'] = self.label
        box = QtWidgets.QBoxLayout(QtWidgets.QBoxLayout.LeftToRight)
        box.addWidget(self.label)
        box.addWidget(self.frame_list)
        self.setLayout(box)

    @property
    def Label(self):
        return self.label

    @property
    def FrameListView(self):
        return self.frame_list
class AnimationLabel(QtWidgets.QLabel):
    """Animation preview: shows one frame and advances via a QTimer chain.

    Left-click toggles play/pause. While playing, start_animation
    re-schedules itself with singleShot using the dialog's duration.
    """
    def __init__(self, parent=None):
        super(self.__class__, self).__init__(parent)
        self.__is_stop = True     # paused by default
        self.__frame_index = 0    # index of the frame currently shown
        self.start_animation()
    def mousePressEvent(self, event):
        # Toggle playback on left click.
        if event.buttons() & QtCore.Qt.LeftButton:
            self.__is_stop = not self.__is_stop
            print('is_stop:', self.__is_stop)
            self.start_animation()
    def start_animation(self):
        # Show the current frame; while playing, advance (wrapping to 0)
        # and schedule the next tick.
        self.setPixmap(FrameListView.Model.Frames[self.__frame_index].Icon.pixmap(16,16))
        if not self.__is_stop:
            if self.__frame_index < FrameListView.Model.rowCount()-1: self.__frame_index += 1
            else: self.__frame_index = 0
            QtCore.QTimer.singleShot(AnimationDurationSetDialog.Duration, self.start_animation)
class FrameListView(QtWidgets.QListView):
    """Horizontal strip of animation frames with add/copy/delete/navigate actions.

    Selecting a frame loads its pixmap into the global Drawable item.
    Right-click opens the duration dialog.
    """
    def __init__(self, parent=None):
        super(self.__class__, self).__init__(parent)
        # NOTE(review): QListView does not document resizeContents in Qt5 -
        # confirm this call actually exists at runtime.
        self.resizeContents(16*16, 16)
        self.model = FrameListModel()
        self.model.appendRow()
        self.setModel(self.model)
        # NOTE: rebinds the module-level name `FrameListModel` (the class)
        # to this model instance.
        globals()['FrameListModel'] = self.model
        self.resize(16*32, 32)
        self.actions = {}
        self.__create_add_frame_action()
        self.__create_delete_frame_action()
        self.__create_insert_copy_frame_action()
        self.__create_selected_previous_frame_action()
        self.__create_selected_next_frame_action()
        self.setCurrentIndex(self.model.index(0,0))
        self.setFlow(QtWidgets.QListView.LeftToRight)
        self.duration_dialog = AnimationDurationSetDialog()
        # Published globally; also rebinds the dialog class name.
        globals()['AnimationDurationSetDialog'] = self.duration_dialog
        self.show()
    def mouseMoveEvent(self, event):
        # NOTE(review): deliberately(?) delegates to mousePressEvent -
        # DrawableItem relies on firing this to force a repaint.
        super(self.__class__, self).mousePressEvent(event)
    def mousePressEvent(self, event):
        super(self.__class__, self).mousePressEvent(event)
        # Load the clicked frame into the canvas.
        for idx in self.selectedIndexes():
            frame = idx.data(QtCore.Qt.UserRole)
            Drawable.Pixels = frame.Pixels
            Drawable.pixmap = frame.pixmap
            Window.widget.view.scene().update()
        if event.buttons() & QtCore.Qt.RightButton:
            self.duration_dialog.show()
    def update_pixmap(self, pixmap):
        # Push the given pixmap into every selected frame.
        for idx in self.selectedIndexes():
            self.model.update_pixmap(idx, pixmap)
    @property
    def Model(self): return self.model
    @property
    def AddFrameAction(self): return self.actions['AddFrame']
    @property
    def InsertCopyFrameAction(self): return self.actions['InsertCopyFrame']
    @property
    def DeleteFrameAction(self): return self.actions['DeleteFrame']
    @property
    def SelectedPreviousFrameAction(self): return self.actions['SelectedPreviousFrame']
    @property
    def SelectedNextFrameAction(self): return self.actions['SelectedNextFrame']
    def __create_add_frame_action(self):
        a = QtWidgets.QAction('Add frame')
        a.setObjectName('AddFrame')
        a.setShortcut('Ins')
        a.triggered.connect(self.__add_frame)
        self.actions['AddFrame'] = a
    def __create_insert_copy_frame_action(self):
        a = QtWidgets.QAction('Insert copy frame')
        a.setObjectName('InsertCopyFrame')
        a.setShortcut('Alt+Ins')
        a.triggered.connect(self.__insert_copy_frame)
        self.actions['InsertCopyFrame'] = a
    def __create_delete_frame_action(self):
        a = QtWidgets.QAction('Delete frame')
        a.setObjectName('DeleteFrame')
        a.setShortcut('Del')
        a.triggered.connect(self.__delete_frame)
        self.actions['DeleteFrame'] = a
    def __create_selected_previous_frame_action(self):
        # NOTE(review): label says "next" but this is the previous-frame action.
        a = QtWidgets.QAction('Selected next frame')
        a.setObjectName('SelectedPreviousFrame')
        a.setShortcut('Alt+Left')
        a.triggered.connect(self.__selected_previous_frame)
        self.actions['SelectedPreviousFrame'] = a
    def __create_selected_next_frame_action(self):
        a = QtWidgets.QAction('Selected next frame')
        a.setObjectName('SelectedNextFrame')
        a.setShortcut('Alt+Right')
        a.triggered.connect(self.__selected_next_frame)
        self.actions['SelectedNextFrame'] = a
    def __add_frame(self):
        # Append an empty frame and select it.
        self.model.appendRow()
        self.setCurrentIndex(self.model.index(len(self.model.Frames)-1,0))
        self.__show_drawable()
    def __insert_new_frame(self):
        # NOTE(review): selectedIndexes() returns a list; idx.row() would
        # raise AttributeError. Apparently unused/dead code.
        idx = self.selectedIndexes()
        self.model.insertRow(idx.row())
        self.__show_drawable()
    def __insert_copy_frame(self):
        # Insert a copy of the first selected frame before it.
        idx = self.selectedIndexes()
        self.model.insertRow(idx[0].row(), self.model.Frames[idx[0].row()].pixmap.copy())
        self.__show_drawable()
    def __selected_previous_frame(self):
        idx = self.selectedIndexes()
        if 0 < idx[0].row():
            self.setCurrentIndex(self.model.index(idx[0].row()-1,0))
            self.__show_drawable()
    def __selected_next_frame(self):
        idx = self.selectedIndexes()
        if idx[0].row() < self.model.rowCount()-1:
            self.setCurrentIndex(self.model.index(idx[0].row()+1,0))
            self.__show_drawable()
    def __delete_frame(self):
        # Never delete the last remaining frame.
        if len(self.model.Frames) < 2: return
        for idx in self.selectedIndexes():
            # Move the selection off the row that is about to disappear.
            if idx.row() == self.model.rowCount()-1:
                self.setCurrentIndex(self.model.index(idx.row()-1,0))
            else:
                self.setCurrentIndex(self.model.index(idx.row(),0))
            self.model.removeRow(idx)
        self.__show_drawable()
    def __show_drawable(self):
        # Copy the selected frame's pixmap into the canvas and repaint.
        for idx in self.selectedIndexes():
            frame = idx.data(QtCore.Qt.UserRole)
            Drawable.pixmap = frame.pixmap.copy()
            Window.widget.view.scene().update()
class FrameListModel(QtCore.QAbstractListModel):
    """List model holding the animation frames shown in FrameListView.

    DecorationRole yields the frame's icon; UserRole yields the Frame
    object itself.
    """
    def __init__(self, parent=None):
        super(self.__class__, self).__init__(parent)
        self.frames = []
    def rowCount(self, parent=QtCore.QModelIndex()):
        # Flat list: children of a valid parent do not exist.
        if parent.isValid(): return 0
        return len(self.frames)
    def data(self, index, role=QtCore.Qt.DisplayRole):
        if role == QtCore.Qt.DecorationRole:
            return self.frames[index.row()].Icon
        elif role == QtCore.Qt.UserRole:
            return self.frames[index.row()]
    def appendRow(self, pixmap=None):
        """Append a new Frame (empty when *pixmap* is None)."""
        self.beginInsertRows(QtCore.QModelIndex(), self.rowCount(), self.rowCount())
        self.frames.append(Frame(pixmap))
        self.endInsertRows()
    def insertRow(self, index, pixmap=None):
        """Insert a new Frame before row *index*."""
        self.beginInsertRows(QtCore.QModelIndex(), index, index)
        self.frames.insert(index, Frame(pixmap))
        self.endInsertRows()
    def removeRow(self, index):
        """Remove the Frame at *index* (a QModelIndex)."""
        # BUG FIX: beginRemoveRows previously announced
        # (rowCount(), rowCount()) - an out-of-range row - instead of the
        # row actually removed; also dropped a stray debug print.
        self.beginRemoveRows(QtCore.QModelIndex(), index.row(), index.row())
        self.frames.pop(index.row())
        self.endRemoveRows()
    def update_pixmap(self, index, pixmap):
        # Delegate to the Frame so its icon is refreshed too.
        self.frames[index.row()].update_pixmap(pixmap)
    @property
    def Frames(self): return self.frames
class Frame:
    """One animation frame: a pixel grid, its pixmap and a list icon."""
    def __init__(self, pixmap=None):
        self.pixels = Pixels()
        if pixmap: self.pixmap = pixmap
        else:
            # No pixmap given: start from a fully transparent ARGB image.
            img = QtGui.QImage(self.pixels.Width, self.pixels.Height, QtGui.QImage.Format_ARGB32)
            img.fill(QtGui.QColor(0,0,0,0))
            self.pixmap = QtGui.QPixmap.fromImage(img)
        # NOTE(review): initialized as a QImage but immediately replaced by
        # a QIcon in update_pixmap below.
        self.icon = QtGui.QImage(self.pixels.Width, self.pixels.Height, QtGui.QImage.Format_Mono)
        self.update_pixmap(self.pixmap)
    def __init_pixmap(self):
        # NOTE(review): apparently unused - nothing in this file calls it.
        painter = QtGui.QPainter(self.pixmap)
        painter.setBrush(QtGui.QBrush(QtGui.QColor(0,0,0), QtCore.Qt.SolidPattern))
        painter.fillRect(self.pixmap.rect(), QtGui.QColor(0,0,0))
        painter.end()
    def update_pixmap(self, pixmap):
        # Replace the pixmap and regenerate the list icon from it.
        self.pixmap = pixmap
        self.icon = QtGui.QIcon(pixmap)
    @property
    def Pixels(self): return self.pixels
    @Pixels.setter
    def Pixels(self, value): self.pixels = value
    @property
    def Icon(self): return self.icon
    @Icon.setter
    def Icon(self, value): self.icon = value
class AnimationDurationSetDialog(QtWidgets.QDialog):
    """Dialog exposing the per-frame duration (ms) and loop count used by
    both the preview playback and the animated gif/webp export."""
    def __init__(self, parent=None):
        super(self.__class__, self).__init__(parent)
        self.setWindowTitle("Set duration")
        self.duration = QtWidgets.QSpinBox()
        self.loop = QtWidgets.QSpinBox()
        # Duration in milliseconds, up to 24 hours.
        self.duration.setMinimum(0)
        self.duration.setMaximum(1000*60*60*24)
        self.duration.setSingleStep(1)
        # Loop count; 0 conventionally means "loop forever" for gif/webp.
        self.loop.setMinimum(0)
        self.loop.setMaximum(2**30)
        self.loop.setSingleStep(1)
        self.duration.setValue(100)
        self.loop.setValue(0)
        layout = QtWidgets.QFormLayout()
        layout.addRow("Duration", self.duration)
        layout.addRow("Loop time", self.loop)
        self.setLayout(layout)
        # Remember geometry between show() calls.
        self.x = self.geometry().x()
        self.y = self.geometry().y()
        self.w = 0
        self.h = 0
    def show(self):
        # Restore the last remembered geometry before showing.
        self.setGeometry(self.x, self.y, self.w, self.h)
        super(self.__class__, self).show()
    def keyPressEvent(self, event):
        # NOTE(review): geometry is only captured when closing via Escape;
        # other close paths do not update it.
        if event.key() == QtCore.Qt.Key_Escape:
            self.x = self.geometry().x()
            self.y = self.geometry().y()
            self.w = self.geometry().width()
            self.h = self.geometry().height()
        super(self.__class__, self).keyPressEvent(event)
    @property
    def Duration(self): return self.duration.value()
    @property
    def Loop(self): return self.loop.value()
if __name__ == "__main__":
    # Entry point: create the Qt application and main window, then run the
    # event loop until the window closes.
    app = QtWidgets.QApplication(sys.argv)
    window = Window()
    sys.exit(app.exec_())
| [
"yttry0@gmail.com"
] | yttry0@gmail.com |
af430d9d37b5e7b94a395f08873779b0f5b342ff | b69fae458c07235d6106b10efec37ad427982365 | /authome/test_authorization_cache.py | 0e301ed34e20c681f86db4d969f53bde718fc7b4 | [
"Apache-2.0"
] | permissive | dbca-wa/authome | 70c782e6eeee8c3cb26c9ac91431ce80968b503d | 9af33ac81db86a87770fb18b2f26b74d9636dafb | refs/heads/master | 2023-08-24T05:43:40.438862 | 2023-08-14T13:27:31 | 2023-08-14T13:27:31 | 76,003,185 | 5 | 6 | NOASSERTION | 2023-08-14T13:27:33 | 2016-12-09T05:00:08 | Python | UTF-8 | Python | false | false | 11,885 | py | # -*- coding: utf-8 -*-
from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from django.test import TestCase, Client
from .models import UserGroup,UserGroupAuthorization,UserAuthorization,can_access
from .cache import cache,HourListTaskRunable
from .basetest import BaseAuthCacheTestCase
class AuthorizationCacheTestCase(BaseAuthCacheTestCase):
def test_authorize(self):
test_datas = [
#0
(
[
("usergroup","add",["all_user",None,["*@*.*"],None]),
("usergroupauthorization","add",["all_user","*",None,["*"]]),
],
[
("hacker1@hacker.com","gunfire.com","/register",False),
("hacker1@hacker.com","map.gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/about",False),
("staff_1@gunfire.com.au","gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/unregister",False),
("staff_1@gunfire.com","gunfire.com","/about",False),
("staff_1@gunfire.com","gunfire.com","/register",False),
("staff_1@gunfire.com","gunfire.com","/unregister",False),
("dev_1@gunfire.com","gunfire.com","/about",False),
("dev_1@gunfire.com","gunfire.com","/register",False),
("dev_1@gunfire.com","gunfire.com","/unregister",False),
("dev_1@gunfire.com","gunfire.com","/start",False),
("dev_1@gunfire.com","gunfire.com","/shutdown",False),
("dev_2@gunfire.com","gunfire.com","/about",False),
("dev_2@gunfire.com","gunfire.com","/register",False),
("dev_2@gunfire.com","gunfire.com","/unregister",False),
("dev_2@gunfire.com","gunfire.com","/start",False),
("dev_2@gunfire.com","gunfire.com","/shutdown",False)
]
),
#1
(
[
("usergroupauthorization","add",["all_user","game.gunfire.com",None,None]),
("usergroupauthorization","add",["all_user","gunfire.com",None,["/register","/start","/shutdown"]])
],
[
("hacker1@hacker.com","gunfire.com","/register",False),
("hacker1@hacker.com","map.gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/about",True),
("staff_1@gunfire.com.au","gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/unregister",True),
("staff_1@gunfire.com","gunfire.com","/about",True),
("staff_1@gunfire.com","gunfire.com","/register",False),
("staff_1@gunfire.com","gunfire.com","/unregister",True),
("dev_1@gunfire.com","gunfire.com","/about",True),
("dev_1@gunfire.com","gunfire.com","/register",False),
("dev_1@gunfire.com","gunfire.com","/unregister",True),
("dev_1@gunfire.com","gunfire.com","/start",False),
("dev_1@gunfire.com","gunfire.com","/shutdown",False),
("dev_2@gunfire.com","gunfire.com","/about",True),
("dev_2@gunfire.com","gunfire.com","/register",False),
("dev_2@gunfire.com","gunfire.com","/unregister",True),
("dev_2@gunfire.com","gunfire.com","/start",False),
("dev_2@gunfire.com","gunfire.com","/shutdown",False)
]
),
#2
(
[
("usergroup","add",["gunfire","all_user",["@gunfire.com"],None]),
("usergroup","add",["dev","gunfire",["dev_*@gunfire.com"],None]),
("usergroupauthorization","add",["gunfire","gunfire.com",None,["/register","/unregister","/start","/shutdown"]]),
("usergroupauthorization","add",["dev","gunfire.com",None,["/unregister","/shutdown"]])
],
[
("hacker1@hacker.com","gunfire.com","/register",False),
("hacker1@hacker.com","map.gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/about",True),
("staff_1@gunfire.com.au","gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/unregister",True),
("staff_1@gunfire.com","gunfire.com","/about",True),
("staff_1@gunfire.com","gunfire.com","/register",False),
("staff_1@gunfire.com","gunfire.com","/unregister",False),
("dev_1@gunfire.com","gunfire.com","/about",True),
("dev_1@gunfire.com","gunfire.com","/register",True),
("dev_1@gunfire.com","gunfire.com","/unregister",False),
("dev_1@gunfire.com","gunfire.com","/start",True),
("dev_1@gunfire.com","gunfire.com","/shutdown",False),
("dev_2@gunfire.com","gunfire.com","/about",True),
("dev_2@gunfire.com","gunfire.com","/register",True),
("dev_2@gunfire.com","gunfire.com","/unregister",False),
("dev_2@gunfire.com","gunfire.com","/start",True),
("dev_2@gunfire.com","gunfire.com","/shutdown",False)
]
),
#3
(
[
("usergroup","delete",["dev"]),
("usergroup","delete",["gunfire"]),
],
[
("hacker1@hacker.com","gunfire.com","/register",False),
("hacker1@hacker.com","map.gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/about",True),
("staff_1@gunfire.com.au","gunfire.com","/register",False),
("staff_1@gunfire.com.au","gunfire.com","/unregister",True),
("staff_1@gunfire.com","gunfire.com","/about",True),
("staff_1@gunfire.com","gunfire.com","/register",False),
("staff_1@gunfire.com","gunfire.com","/unregister",True),
("dev_1@gunfire.com","gunfire.com","/about",True),
("dev_1@gunfire.com","gunfire.com","/register",False),
("dev_1@gunfire.com","gunfire.com","/unregister",True),
("dev_1@gunfire.com","gunfire.com","/start",False),
("dev_1@gunfire.com","gunfire.com","/shutdown",False),
("dev_2@gunfire.com","gunfire.com","/about",True),
("dev_2@gunfire.com","gunfire.com","/register",False),
("dev_2@gunfire.com","gunfire.com","/unregister",True),
("dev_2@gunfire.com","gunfire.com","/start",False),
("dev_2@gunfire.com","gunfire.com","/shutdown",False)
]
),
]
index = -1
for testconfigs,testcases in test_datas:
index += 1
for table,action,configdata in testconfigs:
if table == "usergroup":
if action == "add":
obj = UserGroup(
name=configdata[0],
groupid=configdata[0],
parent_group=UserGroup.objects.get(name=configdata[1]) if configdata[1] else None,
users=configdata[2],
excluded_users=configdata[3]
)
obj.clean()
obj.save()
elif action == "update":
obj = UserGroup.objects.get(name=configdata[0])
obj.parent_group=UserGroup.objects.get(name=configdata[1]) if configdata[1] else None
obj.users = configdata[2]
obj.excluded_users = configdata[3]
obj.clean()
obj.save()
elif action == "delete":
UserGroup.objects.filter(name=configdata[0]).delete()
else:
raise Exception("Unknown action '{1}' for table '{0}'".format(table,action))
elif table == "usergroupauthorization":
usergroup = UserGroup.objects.get(name=configdata[0])
if action == "add":
obj = UserGroupAuthorization(
usergroup=usergroup,
domain=configdata[1],
paths=configdata[2],
excluded_paths=configdata[3]
)
obj.clean()
obj.save()
elif action == "update":
obj = UserGroupAuthorization.objects.get(usergroup=usergroup,domain=configdata[1])
obj.paths = configdata[2]
obj.excluded_paths = configdata[3]
obj.clean()
obj.save()
elif action == "delete":
UserGroupAuthorization.objects.filter(usergroup=usergroup,domain=configdata[1]).delete()
else:
raise Exception("Unknown action '{1}' for table '{0}'".format(table,action))
elif table == "userauthorization":
if action == "add":
obj = UserAuthorization(
user=configdata[0],
domain=configdata[1],
paths=configdata[2],
excluded_paths=configdata[3]
)
obj.clean()
obj.save()
elif action == "update":
obj = UserAuthorization.objects.get(user=configdata[0],domain=configdata[1])
obj.paths = configdata[2]
obj.excluded_paths = configdata[3]
obj.clean()
obj.save()
elif action == "delete":
UserAuthorization.objects.filter(user=configdata[0],domain=configdata[1]).delete()
else:
raise Exception("Unknown action '{1}' for table '{0}'".format(table,action))
else:
raise Exception("Unknown table '{}'".format(table))
cache._authorization_cache_check_time = HourListTaskRunable("authorization cache",settings.AUTHORIZATION_CACHE_CHECK_HOURS)
cache._authorization_cache_check_time.can_run(timezone.localtime() - timedelta(hours=1))
for email,domain,path,result in testcases:
if index == 1 and path == "/about":
#import ipdb;ipdb.set_trace()
pass
if domain == "map.dev.gunfire.com" and email=="staff1@gunfire.com":
#import ipdb;ipdb.set_trace()
pass
self.assertEqual(can_access(email,domain,path),result,
msg="Test scenario({}): {} should {} the permission to access https://{}{}".format(index,email,"have" if result else "not have",domain,path)
)
| [
"rockyc@kens-mate-001.corporateict.domain"
] | rockyc@kens-mate-001.corporateict.domain |
d60b27a09af020085f55092b28cf65da6aae07f6 | 5345cc368ac108776188118d417a8aff8604ec0c | /tests/fields/test_registering.py | 83183bb4691b6316d9e0d783674e0c8fd433bddd | [
"MIT"
] | permissive | hochshi/wagtailstreamforms | 7229097390c34dd100d812a35d7d74b0092479f7 | 8be02c5606d87d0e7f4f648866c36290207163a8 | refs/heads/3-dev | 2020-04-14T17:49:10.846230 | 2019-06-11T08:25:27 | 2019-06-11T08:25:27 | 163,995,051 | 1 | 0 | MIT | 2019-06-11T08:25:28 | 2019-01-03T16:10:34 | Python | UTF-8 | Python | false | false | 463 | py | from django import forms
from wagtailstreamforms import fields
from ..test_case import AppTestCase
class MyField(fields.BaseField):
field_class = forms.CharField
class TestFieldRegistering(AppTestCase):
@classmethod
def setUpClass(cls):
fields.register('myfield', MyField)
@classmethod
def tearDownClass(cls):
del fields._fields['myfield']
def test_field(self):
self.assertIn('myfield', fields.get_fields())
| [
"stuart@accentdesign.co.uk"
] | stuart@accentdesign.co.uk |
d0791d0c24b78ce2b664deac0f6b69070ad79928 | 7e8f67b9b3c7d17b49c2f9677afea78245e8b29f | /accounts/tests/views/test_captive.py | fd0150e96c1ed234d9c86241eefdb1c154683fe1 | [] | no_license | deone/billing-v1 | 23672addfbe8479a45ccf976cafdf6cbe1220834 | ebb933ec2453810fb1c0f565efa8142c82743b85 | refs/heads/master | 2021-08-18T04:17:25.435711 | 2019-06-17T12:52:50 | 2019-06-17T12:52:50 | 90,369,896 | 0 | 0 | null | 2021-06-10T18:38:10 | 2017-05-05T11:40:45 | Python | UTF-8 | Python | false | false | 2,802 | py | from django.core.urlresolvers import reverse
from ...forms import LoginForm
from . import ViewsTests
class CaptiveTests(ViewsTests):
def test_captive(self):
get_params = "?login_url=https%3A%2F%2Fn110.network-auth.com%2Fsplash%2Flogin%3Fmauth%3DMMzZUJGqtrsmvkKw6ktCkcNsuBgluav4m2vgE4p-nFliz6lOzP99ntPzZAjvJ_Yit73ZfWwRDIzoEAwzZSuErRpQwdfD0vVA3XjsLLlK8UNiucySNAij7FEqEAF9osnXpWioNcUpyn7BYW8pP5C-wdZAQpLAWS-lv4UTivlfTUn92n4RxMaWG52Q%26continue_url%3Dhttps%253A%252F%252Fn110.network-auth.com%252Fsplash%252Fconnected%253Fhide_terms%253Dtrue&continue_url=https%3A%2F%2Fn110.network-auth.com%2Fsplash%2Fconnected%3Fhide_terms%3Dtrue&ap_mac=00%3A18%3A0a%3Af2%3Ade%3A20&ap_name=Djungle+HQ+02&ap_tags=office-accra+recently-added&client_mac=4c%3A8d%3A79%3Ad7%3A6b%3A28&client_ip=192.168.2.65"
response = self.c.get(''.join([reverse('captive'), get_params]))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Log In')
self.assertTrue(isinstance(response.context['form'], LoginForm))
self.assertTrue('login_url' in response.context)
self.assertTrue('success_url' in response.context)
def test_captive_with_error_message(self):
get_params = "?login_url=https%3A%2F%2Fn110.network-auth.com%2Fsplash%2Flogin%3Fmauth%3DMMzZUJGqtrsmvkKw6ktCkcNsuBgluav4m2vgE4p-nFliz6lOzP99ntPzZAjvJ_Yit73ZfWwRDIzoEAwzZSuErRpQwdfD0vVA3XjsLLlK8UNiucySNAij7FEqEAF9osnXpWioNcUpyn7BYW8pP5C-wdZAQpLAWS-lv4UTivlfTUn92n4RxMaWG52Q%26continue_url%3Dhttps%253A%252F%252Fn110.network-auth.com%252Fsplash%252Fconnected%253Fhide_terms%253Dtrue&error_message=Access+denied+for+herbertellisspectradjungle%40spectrawireless.com&continue_url=https%3A%2F%2Fn110.network-auth.com%2Fsplash%2Fconnected%3Fhide_terms%3Dtrue&ap_mac=00%3A18%3A0a%3Af2%3Ade%3A20&ap_name=Djungle+HQ+02&ap_tags=office-accra+recently-added&client_mac=4c%3A8d%3A79%3Ad7%3A6b%3A28&client_ip=192.168.2.65"
response = self.c.get(''.join([reverse('captive'), get_params]))
self.assertEqual(response.status_code, 200)
self.assertTrue('error_message' in response.context)
def test_captive_without_get_params(self):
response = self.c.get(reverse('captive'))
self.assertEqual(response.status_code, 404)
def test_success(self):
get_params = "?logout_url=https%3A%2F%2Fn110.network-auth.com%2Fsplash%2Flogout%3Fkey%3DMM7n9oxmBMVzgXgqkvAbLsLTh2cP7lcZdnhrqPRdHlIqzFHCNSRkxoiKzMGmTDQw7dGd092BdPfUs"
response = self.c.get(''.join([reverse('success'), get_params]))
self.assertEqual(response.status_code, 200)
self.assertTrue('logout_url' in response.context)
def test_success_without_get_params(self):
response = self.c.get(reverse('success'))
self.assertEqual(response.status_code, 200)
| [
"alwaysdeone@gmail.com"
] | alwaysdeone@gmail.com |
bd640c0792b3901848aa7820f8ec89682ceb850c | 77c32baf29e5718a07fec9dfaee89cdff3c0f23d | /instance/migrations/0001_initial.py | 62952b23e6eef1835c52c02b26b0ce56df746423 | [] | no_license | syed-saif/hackathon_backend | f438c49268e182ae6a51b9f1650c02e423ea32cd | 37a092cfb27c6e1be3652e17eea3b712ce9b3fd1 | refs/heads/main | 2023-02-09T16:16:51.242683 | 2021-01-12T12:33:03 | 2021-01-12T12:33:03 | 328,973,595 | 0 | 0 | null | 2021-01-12T12:02:59 | 2021-01-12T12:02:58 | null | UTF-8 | Python | false | false | 763 | py | # Generated by Django 3.1.3 on 2020-11-25 16:04
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DetecHandWritten',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('image', models.ImageField(blank=True, null=True, upload_to='detect_images/')),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated_at', models.DateTimeField(auto_now=True)),
],
),
]
| [
"="
] | = |
9aa3c92f85d67695e2c53c6a11ecb3381ce8adb5 | d02261797ab1f6d9ba85370fbb5d73e84390154e | /hunt/special_puzzles/ktane/manual/grandom.py | fb776263fe43cec87e02e1362b35575be15f8e0b | [
"MIT"
] | permissive | YewLabs/2021-hunt | 2c965da93b92d0a53bfa25938b376ebefecc241d | c6ea6bdf17571642ee4e7463a2a363292ff9f972 | refs/heads/master | 2023-03-04T04:16:12.412893 | 2021-02-07T15:48:01 | 2021-02-07T16:50:45 | 336,824,373 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 5,180 | py | from random import Random
class GRandom(Random):
def shuffled(self, x):
x = list(x)
self.shuffle(x)
return x
def distrib(self, total, count, *, min_=0, max_=None, skew=1):
if min_ * count > total:
raise ValueError(
f"The total must be at least {min_}*{count}={min_*count} "
f"when count={count} and min_={min_}"
)
if max_ is not None and max_ * count < total:
raise ValueError(
f"The total must be at most {max_}*{count}={max_*count} "
f"when count={count} and max_={max_}"
)
if skew <= 0:
raise ValueError("The skew has to be at least 1.")
if max_ is None:
max_ = total
dist = [min_] * count
inds = self.shuffled(range(count))
for it in range(total - min_ * count):
while True:
assert inds
idx = min(self.randrange(len(inds)) for it in range(skew))
if dist[inds[idx]] < max_:
dist[inds[idx]] += 1
break
else:
inds[idx], inds[-1] = inds[-1], inds[idx]
inds.pop()
assert sum(dist) == total
assert min_ <= min(dist) <= max(dist) <= max_
return dist
def data(self, arg, n=1):
length = len(self.data_[arg])
k = length + n + 1 if n < 0 else n
return self.sample(self.data_[arg], k)
def directions(self):
return "".join(self.shuffled("CGNESW"))
def side(self):
return self.choice("UDFLBR")
def talk_press(self):
return "".join(self.choice("MmNn") for _ in range(4))
def range_(self, min_, avg, max_):
min_avg, max_avg = int((min_ + avg) / 2), int((max_ + avg) / 2)
ch = self.randint(0, 6)
if ch <= 1:
return {"min": self.randint(avg, max_avg)}
elif ch <= 3:
return {"max": self.randint(min_avg, avg)}
elif ch <= 5:
z = self.randint(min_avg, max_avg)
return {"min": z, "max": z}
else:
return {
"min": self.randint(min_avg, avg),
"max": self.randint(avg, max_avg),
}
def date_range_(self):
ch = self.randint(0, 6)
if ch <= 2:
res = {"obj": "day", "min": 1, "max": 31}
res.update(self.range_(1, 14, 31))
return res
elif ch <= 4:
res = {"obj": "month", "min": 1, "max": 12}
res.update(self.range_(1, 4, 12))
return res
else:
res = {"obj": "year", "min": 2000, "max": 2020}
res.update(self.range_(2000, 2010, 2020))
return res
def range(self, obj):
if obj == "date":
res = self.date_range_()
else:
res = {"obj": obj}
res.update(self.range_(*self.data_["bounds"][obj]))
return {"range": res}
def simple_(self, objs, extra=None):
new_objs = objs[:]
if extra:
new_objs.extend(extra)
obj = self.choice(new_objs)
if extra and obj in extra:
return {obj: self.data(obj)[0]}
ch = self.randint(0, 5)
if ch == 0 and obj != "date":
return {"odd": obj}
elif ch == 1 and obj != "date":
return {"even": obj}
return self.range(obj)
def condition_(self, objs, complexity=0, extra=None):
# objs: ["batteries", "ports", "date", "serial digit"]
# extra: ["gravity", "serial has"]
if complexity == 3:
res = self.condition_(objs, 2, extra)
return res if self.randint(0, 4) else {"not": res}
elif complexity == 2:
ch = self.randint(0, 3)
if ch <= 1 and len(objs) > 1:
head, *tail = objs
return {
"and"
if ch
else "or": [
self.condition_([head], 1),
self.condition_(tail, 1, extra),
]
}
return self.condition_(objs, 1, extra)
elif complexity == 1:
res = self.simple_(objs, extra)
has = lambda x: x in res["range"]
one_sided = "range" in res and (has("min") != has("max"))
return {"not": res} if not (self.randint(0, 3) or one_sided) else res
return self.simple_(objs, extra)
def simplify(self, res):
# de morgan (and not not -> not or)
if "and" in res and all("not" in x for x in res["and"]):
return self.simplify({"not": {"or": [x["not"] for x in res["and"]]}})
# de morgan (not and -> or not not)
if "not" in res and "and" in res["not"]:
return self.simplify({"or": [self.simplify({"not": x}) for x in res["not"]["and"]]})
# double negation
if "not" in res and "not" in res["not"]:
return self.simplify(res["not"]["not"])
return res
def condition(self, *args):
return self.simplify(self.condition_(*args))
| [
"dvorak42@mit.edu"
] | dvorak42@mit.edu |
568fbfe4e7d7d0dddaf488b5339808d9e0641214 | f3eae8877d8065abced3ad5eadc1a084c9569e80 | /functional_preprocessing/topup-version/struct_preproc/structural.py | 560febe8d64410fbd04fde4e56564066aa1356a7 | [] | no_license | fBeyer89/RSV_rsanalysis | 586afc52a5a93fb681166fd7ee0795d0197a3a63 | 1128ace44f52143e94d9c98865c084d30aeca36c | refs/heads/master | 2020-04-09T05:46:54.200643 | 2019-04-18T14:22:42 | 2019-04-18T14:22:42 | 92,715,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,213 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Feb 9 14:33:51 2015
@author: fbeyer
"""
'''
Main workflow for preprocessing of mprage data
===============================================
Uses file structure set up by conversion
'''
from nipype.pipeline.engine import Node, Workflow
import nipype.interfaces.io as nio
from reconall import create_reconall_pipeline
from mgzconvert import create_mgzconvert_pipeline
from ants import create_normalize_pipeline
#from brainextract import create_brainextract_pipeline
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir, standard_brain):
# main workflow
struct_preproc = Workflow(name='anat_preproc')
struct_preproc.base_dir = working_dir
struct_preproc.config['execution']['crashdump_dir'] = struct_preproc.base_dir + "/crash_files"
# select files
#templates={'anat': '3T/nifti/MPRAGEADNI32Ch.nii.gz'}
#selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir), name="selectfiles")
# workflow to run freesurfer reconall
reconall=create_reconall_pipeline()
reconall.inputs.inputnode.fs_subjects_dir=freesurfer_dir
reconall.inputs.inputnode.fs_subject_id=subject
# workflow to get brain, head and wmseg from freesurfer and convert to nifti
mgzconvert = create_mgzconvert_pipeline()
mgzconvert.inputs.inputnode.fs_subjects_dir=freesurfer_dir
mgzconvert.inputs.inputnode.fs_subject_id=subject
normalize = create_normalize_pipeline()
normalize.inputs.inputnode.standard = standard_brain
# sink to store files
sink = Node(nio.DataSink(base_directory=out_dir,
parameterization=False,
substitutions=[
('transform_Warped', 'T1_brain2mni')]),
name='sink')
# connections
struct_preproc.connect(
[#(selectfiles, sink, [('anat', 'outputnode.test')]),
#(selectfiles, reconall, [('anat', 'inputnode.anat')]),
#(reconall, mgzconvert, [('outputnode.fs_subject_id', 'inputnode.fs_subject_id'),
# ('outputnode.fs_subjects_dir', 'inputnode.fs_subjects_dir')]),
#for second round of structural don't redo FREESURFER
(mgzconvert, normalize, [('outputnode.anat_brain', 'inputnode.anat')]),
(mgzconvert, sink, [('outputnode.anat_head', '@head')]),
(mgzconvert, sink, [('outputnode.anat_brain', '@brain')]),
(mgzconvert, sink, [('outputnode.anat_brain_mask', '@mask')]),
(mgzconvert, sink, [('outputnode.wmedge', '@wmedge')]),
(normalize, sink, [('outputnode.anat2std', '@anat2std'),
('outputnode.anat2std_transforms', 'transforms2mni.@anat2std_transforms'),
('outputnode.std2anat_transforms', 'transforms2mni.@std2anat_transforms')])
])
struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
# struct_preproc.run()
struct_preproc.run() #, plugin_args = {'initial_specs': 'request_memory = 1500'}plugin='CondorDAGMan'
#struct_preproc.run(plugin='MultiProc')
| [
"fbeyer@cbs.mpg.de"
] | fbeyer@cbs.mpg.de |
0cb1eef4e1b828ab9c069f0b9fbd70fb3d42629f | 321b4ed83b6874eeb512027eaa0b17b0daf3c289 | /165/165.compare-version-numbers.163040956.Runtime-Error.leetcode.py | f2dc2de336fe3c1104efe74edcd9f208144c57ac | [] | no_license | huangyingw/submissions | 7a610613bdb03f1223cdec5f6ccc4391149ca618 | bfac1238ecef8b03e54842b852f6fec111abedfa | refs/heads/master | 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | class Solution(object):
def compareVersion(self, version1, version2):
arr1 = version1.split(".")
arr2 = version2.split(".")
i = 0
while(i < len(arr1)):
if int(arr2[i]) > int(arr1[i]):
return -1
if int(arr1[i]) > int(arr2[i]):
return 1
i += 1
return 0
| [
"huangyingw@gmail.com"
] | huangyingw@gmail.com |
3fd9e0f64632f7bc58bafda53149574b330177da | 8f6aa9ac9c8c2e409875bbf36fbc49b3eb37d88b | /enthought/pyface/timer/api.py | f78cc07f819be65fe9879384ef67c9e7a6bfd889 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | enthought/etsproxy | 5660cf562c810db2ceb6b592b6c12274bce96d73 | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | refs/heads/master | 2023-03-27T04:51:29.297305 | 2020-12-02T09:05:18 | 2020-12-02T09:05:18 | 1,632,969 | 3 | 1 | NOASSERTION | 2020-12-02T09:05:20 | 2011-04-18T22:29:56 | Python | UTF-8 | Python | false | false | 46 | py | # proxy module
from pyface.timer.api import *
| [
"ischnell@enthought.com"
] | ischnell@enthought.com |
68bcfd2922d3dba3e542fed5e919fa83143d9bfa | ba602dc67ad7bb50133aeb312f3c6c54627b3dec | /data/3922/AC_py/508134.py | 7f1f09ae0a4fe4b50f052a90e596911ef725a583 | [] | no_license | Dearyyyyy/TCG | 0d21d89275906157372d775f33309ce337e6bc95 | 7b80de16de2d3f5d95a7c4ed95d45a9e38882e67 | refs/heads/master | 2020-12-27T23:19:44.845918 | 2020-02-04T01:59:23 | 2020-02-04T01:59:23 | 238,101,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | # coding=utf-8
import sys
while True:
a,b=map(float,input().split())
if b!=0:
print(int((a/b+0.5)//1))
else:
print("error") | [
"543271544@qq.com"
] | 543271544@qq.com |
d1ebe71d5c08688b5518526b99952e558aa18674 | a439ca43178d38cfe6daaee50ea134ca6c52b502 | /thaniya_server_archive/src/thaniya_server_archive/volumes/__init__.py | be1415d1034a205e635c0328cb07d874915aaa23 | [
"Apache-2.0"
] | permissive | jkpubsrc/Thaniya | 37ca727abdc6f9f605257813889fe3a033995bba | 4ebdf2854e3d7888af7396adffa22628b4ab2267 | refs/heads/master | 2023-03-05T20:58:59.528746 | 2021-02-15T19:31:06 | 2021-02-15T19:31:06 | 331,318,787 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py |
__version__ = "0.2021.1.20.1"
from .BackupVolumeID import BackupVolumeID
from ._RawDeviceIterator import _RawDeviceIterator
from .Device import Device
from .DeviceIterator import DeviceIterator
from .BackupVolumeInfo import BackupVolumeInfo
from .BackupVolumeCfgFile import BackupVolumeCfgFile
from .BackupVolumeManager import BackupVolumeManager | [
"pubsrc@binary-overflow.de"
] | pubsrc@binary-overflow.de |
ddb2eb6438bfbc1bf13b102cc1c5ec3d453ebb8e | b26c41926fa3a7c2c061132d80e91a2750f2f468 | /tensorflow_probability/python/internal/backend/numpy/gen/linear_operator_block_diag.py | 7f5fe18bbae95ec7ad079b657d701b1a25c3710d | [
"Apache-2.0"
] | permissive | tensorflow/probability | 22e679a4a883e408f8ef237cda56e3e3dfa42b17 | 42a64ba0d9e0973b1707fcd9b8bd8d14b2d4e3e5 | refs/heads/main | 2023-09-04T02:06:08.174935 | 2023-08-31T20:30:00 | 2023-08-31T20:31:33 | 108,053,674 | 4,055 | 1,269 | Apache-2.0 | 2023-09-13T21:49:49 | 2017-10-23T23:50:54 | Jupyter Notebook | UTF-8 | Python | false | false | 35,254 | py | # Copyright 2020 The TensorFlow Probability Authors. All Rights Reserved.
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# THIS FILE IS AUTO-GENERATED BY `gen_linear_operators.py`.
# DO NOT MODIFY DIRECTLY.
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# pylint: disable=g-import-not-at-top
# pylint: disable=g-direct-tensorflow-import
# pylint: disable=g-bad-import-order
# pylint: disable=unused-import
# pylint: disable=line-too-long
# pylint: disable=reimported
# pylint: disable=g-bool-id-comparison
# pylint: disable=g-statement-before-imports
# pylint: disable=bad-continuation
# pylint: disable=useless-import-alias
# pylint: disable=property-with-parameters
# pylint: disable=trailing-whitespace
# pylint: disable=g-inconsistent-quotes
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Create a Block Diagonal operator from one or more `LinearOperators`."""
from tensorflow_probability.python.internal.backend.numpy import ops as common_shapes
from tensorflow_probability.python.internal.backend.numpy import dtype as dtypes
from tensorflow_probability.python.internal.backend.numpy import ops
# from tensorflow.python.framework import tensor_conversion
from tensorflow_probability.python.internal.backend.numpy.gen import tensor_shape
from tensorflow_probability.python.internal.backend.numpy import numpy_array as array_ops
from tensorflow_probability.python.internal.backend.numpy import numpy_array as array_ops_stack
from tensorflow_probability.python.internal.backend.numpy import debugging as check_ops
from tensorflow_probability.python.internal.backend.numpy import control_flow as control_flow_ops
from tensorflow_probability.python.internal.backend.numpy.gen import linear_operator
from tensorflow_probability.python.internal.backend.numpy.gen import linear_operator_util
from tensorflow_probability.python.internal.backend.numpy.gen import property_hint_util
# from tensorflow.python.util.tf_export import tf_export
__all__ = ["LinearOperatorBlockDiag"]
# @tf_export("linalg.LinearOperatorBlockDiag")
# @linear_operator.make_composite_tensor
class LinearOperatorBlockDiag(linear_operator.LinearOperator):
"""Combines one or more `LinearOperators` in to a Block Diagonal matrix.
This operator combines one or more linear operators `[op1,...,opJ]`,
building a new `LinearOperator`, whose underlying matrix representation
has each operator `opi` on the main diagonal, and zero's elsewhere.
#### Shape compatibility
If `opj` acts like a [batch] matrix `Aj`, then `op_combined` acts like
the [batch] matrix formed by having each matrix `Aj` on the main
diagonal.
Each `opj` is required to represent a matrix, and hence will have
shape `batch_shape_j + [M_j, N_j]`.
If `opj` has shape `batch_shape_j + [M_j, N_j]`, then the combined operator
has shape `broadcast_batch_shape + [sum M_j, sum N_j]`, where
`broadcast_batch_shape` is the mutual broadcast of `batch_shape_j`,
`j = 1,...,J`, assuming the intermediate batch shapes broadcast.
Arguments to `matmul`, `matvec`, `solve`, and `solvevec` may either be single
`Tensor`s or lists of `Tensor`s that are interpreted as blocks. The `j`th
element of a blockwise list of `Tensor`s must have dimensions that match
`opj` for the given method. If a list of blocks is input, then a list of
blocks is returned as well.
When the `opj` are not guaranteed to be square, this operator's methods might
fail due to the combined operator not being square and/or lack of efficient
methods.
```python
# Create a 4 x 4 linear operator combined of two 2 x 2 operators.
operator_1 = LinearOperatorFullMatrix([[1., 2.], [3., 4.]])
operator_2 = LinearOperatorFullMatrix([[1., 0.], [0., 1.]])
operator = LinearOperatorBlockDiag([operator_1, operator_2])
operator.to_dense()
==> [[1., 2., 0., 0.],
[3., 4., 0., 0.],
[0., 0., 1., 0.],
[0., 0., 0., 1.]]
tensor_shape.TensorShape(operator.shape)
==> [4, 4]
operator.log_abs_determinant()
==> scalar Tensor
x1 = ... # Shape [2, 2] Tensor
x2 = ... # Shape [2, 2] Tensor
x = tf.concat([x1, x2], 0) # Shape [2, 4] Tensor
operator.matmul(x)
==> tf.concat([operator_1.matmul(x1), operator_2.matmul(x2)])
# Create a 5 x 4 linear operator combining three blocks.
operator_1 = LinearOperatorFullMatrix([[1.], [3.]])
operator_2 = LinearOperatorFullMatrix([[1., 6.]])
operator_3 = LinearOperatorFullMatrix([[2.], [7.]])
operator = LinearOperatorBlockDiag([operator_1, operator_2, operator_3])
operator.to_dense()
==> [[1., 0., 0., 0.],
[3., 0., 0., 0.],
[0., 1., 6., 0.],
[0., 0., 0., 2.]]
[0., 0., 0., 7.]]
tensor_shape.TensorShape(operator.shape)
==> [5, 4]
# Create a [2, 3] batch of 4 x 4 linear operators.
matrix_44 = tf.random.normal(shape=[2, 3, 4, 4])
operator_44 = LinearOperatorFullMatrix(matrix)
# Create a [1, 3] batch of 5 x 5 linear operators.
matrix_55 = tf.random.normal(shape=[1, 3, 5, 5])
operator_55 = LinearOperatorFullMatrix(matrix_55)
# Combine to create a [2, 3] batch of 9 x 9 operators.
operator_99 = LinearOperatorBlockDiag([operator_44, operator_55])
# Create a shape [2, 3, 9] vector.
x = tf.random.normal(shape=[2, 3, 9])
operator_99.matmul(x)
==> Shape [2, 3, 9] Tensor
# Create a blockwise list of vectors.
x = [tf.random.normal(shape=[2, 3, 4]), tf.random.normal(shape=[2, 3, 5])]
operator_99.matmul(x)
==> [Shape [2, 3, 4] Tensor, Shape [2, 3, 5] Tensor]
```
#### Performance
The performance of `LinearOperatorBlockDiag` on any operation is equal to
the sum of the individual operators' operations.
#### Matrix property hints
This `LinearOperator` is initialized with boolean flags of the form `is_X`,
for `X = non_singular, self_adjoint, positive_definite, square`.
These have the following meaning:
* If `is_X == True`, callers should expect the operator to have the
property `X`. This is a promise that should be fulfilled, but is *not* a
runtime assert. For example, finite floating point precision may result
in these promises being violated.
* If `is_X == False`, callers should expect the operator to not have `X`.
* If `is_X == None` (the default), callers should have no expectation either
way.
"""
def __init__(self,
operators,
is_non_singular=None,
is_self_adjoint=None,
is_positive_definite=None,
is_square=True,
name=None):
r"""Initialize a `LinearOperatorBlockDiag`.
`LinearOperatorBlockDiag` is initialized with a list of operators
`[op_1,...,op_J]`.
Args:
operators: Iterable of `LinearOperator` objects, each with
the same `dtype` and composable shape.
is_non_singular: Expect that this operator is non-singular.
is_self_adjoint: Expect that this operator is equal to its hermitian
transpose.
is_positive_definite: Expect that this operator is positive definite,
meaning the quadratic form `x^H A x` has positive real part for all
nonzero `x`. Note that we do not require the operator to be
self-adjoint to be positive-definite. See:
https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non-symmetric_matrices
is_square: Expect that this operator acts like square [batch] matrices.
This is true by default, and will raise a `ValueError` otherwise.
name: A name for this `LinearOperator`. Default is the individual
operators names joined with `_o_`.
Raises:
TypeError: If all operators do not have the same `dtype`.
ValueError: If `operators` is empty or are non-square.
"""
parameters = dict(
operators=operators,
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
name=name
)
# Validate operators.
check_ops.assert_proper_iterable(operators)
operators = list(operators)
if not operators:
raise ValueError(
"Expected a non-empty list of operators. Found: %s" % operators)
self._operators = operators
# Define diagonal operators, for functions that are shared across blockwise
# `LinearOperator` types.
self._diagonal_operators = operators
# Validate dtype.
dtype = operators[0].dtype
for operator in operators:
if operator.dtype != dtype:
name_type = (str((o.name, o.dtype)) for o in operators)
raise TypeError(
"Expected all operators to have the same dtype. Found %s"
% " ".join(name_type))
# Auto-set and check hints.
if all(operator.is_non_singular for operator in operators):
if is_non_singular is False:
raise ValueError(
"The direct sum of non-singular operators is always non-singular.")
is_non_singular = True
if all(operator.is_self_adjoint for operator in operators):
if is_self_adjoint is False:
raise ValueError(
"The direct sum of self-adjoint operators is always self-adjoint.")
is_self_adjoint = True
if all(operator.is_positive_definite for operator in operators):
if is_positive_definite is False:
raise ValueError(
"The direct sum of positive definite operators is always "
"positive definite.")
is_positive_definite = True
if name is None:
# Using ds to mean direct sum.
name = "_ds_".join(operator.name for operator in operators)
with ops.name_scope(name):
super(LinearOperatorBlockDiag, self).__init__(
dtype=dtype,
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
parameters=parameters,
name=name)
@property
def operators(self):
return self._operators
def _block_range_dimensions(self):
return [op.range_dimension for op in self._diagonal_operators]
def _block_domain_dimensions(self):
return [op.domain_dimension for op in self._diagonal_operators]
def _block_range_dimension_tensors(self):
return [op.range_dimension_tensor() for op in self._diagonal_operators]
def _block_domain_dimension_tensors(self):
return [op.domain_dimension_tensor() for op in self._diagonal_operators]
def _shape(self):
# Get final matrix shape.
domain_dimension = sum(self._block_domain_dimensions())
range_dimension = sum(self._block_range_dimensions())
matrix_shape = tensor_shape.TensorShape([range_dimension, domain_dimension])
# Get broadcast batch shape.
# broadcast_shape checks for compatibility.
batch_shape = self.operators[0].batch_shape
for operator in self.operators[1:]:
batch_shape = common_shapes.broadcast_shape(
batch_shape, operator.batch_shape)
return batch_shape.concatenate(matrix_shape)
def _shape_tensor(self):
# Avoid messy broadcasting if possible.
if tensor_shape.TensorShape(self.shape).is_fully_defined():
return ops.convert_to_tensor(
tensor_shape.TensorShape(self.shape).as_list(), dtype=dtypes.int32, name="shape"
)
domain_dimension = sum(self._block_domain_dimension_tensors())
range_dimension = sum(self._block_range_dimension_tensors())
matrix_shape = array_ops_stack.stack([range_dimension, domain_dimension])
# Dummy Tensor of zeros. Will never be materialized.
zeros = array_ops.zeros(shape=self.operators[0].batch_shape_tensor())
for operator in self.operators[1:]:
zeros = zeros + array_ops.zeros(shape=operator.batch_shape_tensor())
batch_shape = prefer_static.shape(zeros)
return prefer_static.concat((batch_shape, matrix_shape), 0)
def _linop_adjoint(self) -> "LinearOperatorBlockDiag":
# We take the adjoint of each block on the diagonal.
return LinearOperatorBlockDiag(
operators=[operator.adjoint() for operator in self.operators],
is_non_singular=self.is_non_singular,
is_self_adjoint=self.is_self_adjoint,
is_positive_definite=self.is_positive_definite,
is_square=True)
def _linop_cholesky(self) -> "LinearOperatorBlockDiag":
# We take the cholesky of each block on the diagonal.
return LinearOperatorBlockDiag(
operators=[operator.cholesky() for operator in self.operators],
is_non_singular=True,
is_self_adjoint=None, # Let the operators passed in decide.
is_square=True)
def _linop_inverse(self) -> "LinearOperatorBlockDiag":
# We take the inverse of each block on the diagonal.
return LinearOperatorBlockDiag(
operators=[
operator.inverse() for operator in self.operators],
is_non_singular=self.is_non_singular,
is_self_adjoint=self.is_self_adjoint,
is_positive_definite=self.is_positive_definite,
is_square=True)
def _linop_matmul(
self,
left_operator: "LinearOperatorBlockDiag",
right_operator: linear_operator.LinearOperator,
) -> linear_operator.LinearOperator:
if isinstance(right_operator, LinearOperatorBlockDiag):
return LinearOperatorBlockDiag(
operators=[
o1.matmul(o2) for o1, o2 in zip(
left_operator.operators, right_operator.operators)],
is_non_singular=property_hint_util.combined_non_singular_hint(
left_operator, right_operator),
# In general, a product of self-adjoint positive-definite
# block diagonal matrices is not self-adjoint.
is_self_adjoint=None,
# In general, a product of positive-definite block diagonal
# matrices is not positive-definite.
is_positive_definite=None,
is_square=True)
return super()._linop_matmul(left_operator, right_operator)
def _linop_solve(
self,
left_operator: "LinearOperatorBlockDiag",
right_operator: linear_operator.LinearOperator,
) -> linear_operator.LinearOperator:
if isinstance(right_operator, LinearOperatorBlockDiag):
return LinearOperatorBlockDiag(
operators=[
o1.solve(o2) for o1, o2 in zip(
left_operator.operators, right_operator.operators)],
is_non_singular=property_hint_util.combined_non_singular_hint(
left_operator, right_operator),
# In general, a solve of self-adjoint positive-definite block diagonal
# matrices is not self = self - adjoint.
is_self_adjoint=None,
# In general, a solve of positive-definite block diagonal matrices is
# not positive-definite.
is_positive_definite=None,
is_square=True)
return super()._linop_solve(left_operator, right_operator)
# TODO(b/188080761): Add a more efficient implementation of `cond` that
# constructs the condition number from the blockwise singular values.
  def matmul(self, x, adjoint=False, adjoint_arg=False, name="matmul"):
    """Transform [batch] matrix `x` with left multiplication: `x --> Ax`.
    ```python
    # Make an operator acting like batch matrix A. Assume tensor_shape.TensorShape(A.shape) = [..., M, N]
    operator = LinearOperator(...)
    tensor_shape.TensorShape(operator.shape) = [..., M, N]
    X = ... # shape [..., N, R], batch matrix, R > 0.
    Y = operator.matmul(X)
    tensor_shape.TensorShape(Y.shape)
    ==> [..., M, R]
    Y[..., :, r] = sum_j A[..., :, j] X[j, r]
    ```
    Args:
      x: `LinearOperator`, `Tensor` with compatible shape and same `dtype` as
        `self`, or a blockwise iterable of `LinearOperator`s or `Tensor`s. See
        class docstring for definition of shape compatibility.
      adjoint: Python `bool`. If `True`, left multiply by the adjoint: `A^H x`.
      adjoint_arg: Python `bool`. If `True`, compute `A x^H` where `x^H` is
        the hermitian transpose (transposition and complex conjugation).
      name: A name for this `Op`.
    Returns:
      A `LinearOperator` or `Tensor` with shape `[..., M, R]` and same `dtype`
      as `self`, or if `x` is blockwise, a list of `Tensor`s with shapes that
      concatenate to `[..., M, R]`.
    """
    def _check_operators_agree(r, l, message):
      # Static shape check only: `None` dimensions are statically unknown and
      # are therefore not validated here.
      if (r.range_dimension is not None and
          l.domain_dimension is not None and
          r.range_dimension != l.domain_dimension):
        raise ValueError(message)
    if isinstance(x, linear_operator.LinearOperator):
      # Operator @ operator path: fold the adjoint flags into the operands so
      # the remaining work is a plain product of two operators.
      left_operator = self.adjoint() if adjoint else self
      right_operator = x.adjoint() if adjoint_arg else x
      _check_operators_agree(
          right_operator, left_operator,
          "Operators are incompatible. Expected `x` to have dimension"
          " {} but got {}.".format(
              left_operator.domain_dimension, right_operator.range_dimension))
      # We can efficiently multiply BlockDiag LinearOperators if the number of
      # blocks agree.
      if isinstance(x, LinearOperatorBlockDiag):
        if len(left_operator.operators) != len(right_operator.operators):
          raise ValueError(
              "Can not efficiently multiply two `LinearOperatorBlockDiag`s "
              "together when number of blocks differ.")
        # Check blocks pairwise, since the efficient path multiplies them
        # pairwise.
        for o1, o2 in zip(left_operator.operators, right_operator.operators):
          _check_operators_agree(
              o2, o1,
              "Blocks are incompatible. Expected `x` to have dimension"
              " {} but got {}.".format(
                  o1.domain_dimension, o2.range_dimension))
      with self._name_scope(name):  # pylint: disable=not-callable
        return self._linop_matmul(left_operator, right_operator)
    # Tensor (possibly blockwise) path.
    with self._name_scope(name):  # pylint: disable=not-callable
      arg_dim = -1 if adjoint_arg else -2
      block_dimensions = (self._block_range_dimensions() if adjoint
                          else self._block_domain_dimensions())
      if linear_operator_util.arg_is_blockwise(block_dimensions, x, arg_dim):
        # Blockwise input: validate each block against the corresponding
        # block's dimension, converting to tensors in place.
        for i, block in enumerate(x):
          if not isinstance(block, linear_operator.LinearOperator):
            block = ops.convert_to_tensor(block)
            # self._check_input_dtype(block)
            block_dimensions[i].assert_is_compatible_with(tensor_shape.TensorShape(block.shape)[arg_dim])
            x[i] = block
      else:
        x = ops.convert_to_tensor(x, name="x")
        # self._check_input_dtype(x)
        op_dimension = (self.range_dimension if adjoint
                        else self.domain_dimension)
        op_dimension.assert_is_compatible_with(tensor_shape.TensorShape(x.shape)[arg_dim])
      return self._matmul(x, adjoint=adjoint, adjoint_arg=adjoint_arg)
  def _matmul(self, x, adjoint=False, adjoint_arg=False):
    """Blockwise matmul: split `x` into blocks, matmul each, re-concatenate.

    If `x` was passed blockwise (a list), the per-block results are returned
    as a list; otherwise they are broadcast and concatenated back into one
    `Tensor` along the row axis.
    """
    arg_dim = -1 if adjoint_arg else -2
    block_dimensions = (self._block_range_dimensions() if adjoint
                        else self._block_domain_dimensions())
    block_dimensions_fn = (
        self._block_range_dimension_tensors if adjoint
        else self._block_domain_dimension_tensors)
    blockwise_arg = linear_operator_util.arg_is_blockwise(
        block_dimensions, x, arg_dim)
    if blockwise_arg:
      # Caller already supplied one piece per block.
      split_x = x
    else:
      split_dim = -1 if adjoint_arg else -2
      # Split input by rows normally, and otherwise columns.
      split_x = linear_operator_util.split_arg_into_blocks(
          block_dimensions, block_dimensions_fn, x, axis=split_dim)
    result_list = []
    for index, operator in enumerate(self.operators):
      result_list = result_list + [operator.matmul(
          split_x[index], adjoint=adjoint, adjoint_arg=adjoint_arg)]
    if blockwise_arg:
      return result_list
    # Broadcast batch dims across blocks before concatenating rows.
    result_list = linear_operator_util.broadcast_matrix_batch_dims(
        result_list)
    return prefer_static.concat(result_list, axis=-2)
  def matvec(self, x, adjoint=False, name="matvec"):
    """Transform [batch] vector `x` with left multiplication: `x --> Ax`.
    ```python
    # Make an operator acting like batch matrix A. Assume tensor_shape.TensorShape(A.shape) = [..., M, N]
    operator = LinearOperator(...)
    X = ... # shape [..., N], batch vector
    Y = operator.matvec(X)
    tensor_shape.TensorShape(Y.shape)
    ==> [..., M]
    Y[..., :] = sum_j A[..., :, j] X[..., j]
    ```
    Args:
      x: `Tensor` with compatible shape and same `dtype` as `self`, or an
        iterable of `Tensor`s (for blockwise operators). `Tensor`s are treated
        a [batch] vectors, meaning for every set of leading dimensions, the last
        dimension defines a vector.
        See class docstring for definition of compatibility.
      adjoint: Python `bool`. If `True`, left multiply by the adjoint: `A^H x`.
      name: A name for this `Op`.
    Returns:
      A `Tensor` with shape `[..., M]` and same `dtype` as `self`.
    """
    with self._name_scope(name):  # pylint: disable=not-callable
      block_dimensions = (self._block_range_dimensions() if adjoint
                          else self._block_domain_dimensions())
      if linear_operator_util.arg_is_blockwise(block_dimensions, x, -1):
        for i, block in enumerate(x):
          if not isinstance(block, linear_operator.LinearOperator):
            block = ops.convert_to_tensor(block)
            # self._check_input_dtype(block)
            block_dimensions[i].assert_is_compatible_with(tensor_shape.TensorShape(block.shape)[-1])
            x[i] = block
        # Lift each vector block to a single-column matrix, reuse matmul,
        # then squeeze the column axis back off each result.
        x_mat = [block[..., _ops.newaxis] for block in x]
        y_mat = self.matmul(x_mat, adjoint=adjoint)
        return [array_ops.squeeze(y, axis=-1) for y in y_mat]
      x = ops.convert_to_tensor(x, name="x")
      # self._check_input_dtype(x)
      op_dimension = (self.range_dimension if adjoint
                      else self.domain_dimension)
      op_dimension.assert_is_compatible_with(tensor_shape.TensorShape(x.shape)[-1])
      # Same trick for the non-blockwise case: matvec via matmul on [..., N, 1].
      x_mat = x[..., _ops.newaxis]
      y_mat = self.matmul(x_mat, adjoint=adjoint)
      return array_ops.squeeze(y_mat, axis=-1)
def _determinant(self):
result = self.operators[0].determinant()
for operator in self.operators[1:]:
result = result * operator.determinant()
return result
def _log_abs_determinant(self):
result = self.operators[0].log_abs_determinant()
for operator in self.operators[1:]:
result = result + operator.log_abs_determinant()
return result
  def solve(self, rhs, adjoint=False, adjoint_arg=False, name="solve"):
    """Solve (exact or approx) `R` (batch) systems of equations: `A X = rhs`.
    The returned `Tensor` will be close to an exact solution if `A` is well
    conditioned. Otherwise closeness will vary. See class docstring for details.
    Examples:
    ```python
    # Make an operator acting like batch matrix A. Assume tensor_shape.TensorShape(A.shape) = [..., M, N]
    operator = LinearOperator(...)
    tensor_shape.TensorShape(operator.shape) = [..., M, N]
    # Solve R > 0 linear systems for every member of the batch.
    RHS = ... # shape [..., M, R]
    X = operator.solve(RHS)
    # X[..., :, r] is the solution to the r'th linear system
    # sum_j A[..., :, j] X[..., j, r] = RHS[..., :, r]
    operator.matmul(X)
    ==> RHS
    ```
    Args:
      rhs: `Tensor` with same `dtype` as this operator and compatible shape,
        or a list of `Tensor`s (for blockwise operators). `Tensor`s are treated
        like a [batch] matrices meaning for every set of leading dimensions, the
        last two dimensions defines a matrix.
        See class docstring for definition of compatibility.
      adjoint: Python `bool`. If `True`, solve the system involving the adjoint
        of this `LinearOperator`: `A^H X = rhs`.
      adjoint_arg: Python `bool`. If `True`, solve `A X = rhs^H` where `rhs^H`
        is the hermitian transpose (transposition and complex conjugation).
      name: A name scope to use for ops added by this method.
    Returns:
      `Tensor` with shape `[...,N, R]` and same `dtype` as `rhs`.
    Raises:
      NotImplementedError: If `self.is_non_singular` or `is_square` is False.
    """
    # Only reject when the hint is known-False; `None` (unknown) is allowed.
    if self.is_non_singular is False:
      raise NotImplementedError(
          "Exact solve not implemented for an operator that is expected to "
          "be singular.")
    if self.is_square is False:
      raise NotImplementedError(
          "Exact solve not implemented for an operator that is expected to "
          "not be square.")
    def _check_operators_agree(r, l, message):
      # Static shape check only; `None` dimensions are not validated here.
      if (r.range_dimension is not None and
          l.domain_dimension is not None and
          r.range_dimension != l.domain_dimension):
        raise ValueError(message)
    if isinstance(rhs, linear_operator.LinearOperator):
      # Operator rhs path: fold adjoint flags into the operands.
      left_operator = self.adjoint() if adjoint else self
      right_operator = rhs.adjoint() if adjoint_arg else rhs
      _check_operators_agree(
          right_operator, left_operator,
          "Operators are incompatible. Expected `x` to have dimension"
          " {} but got {}.".format(
              left_operator.domain_dimension, right_operator.range_dimension))
      # We can efficiently solve BlockDiag LinearOperators if the number of
      # blocks agree.
      if isinstance(right_operator, LinearOperatorBlockDiag):
        if len(left_operator.operators) != len(right_operator.operators):
          raise ValueError(
              "Can not efficiently solve `LinearOperatorBlockDiag` when "
              "number of blocks differ.")
        for o1, o2 in zip(left_operator.operators, right_operator.operators):
          _check_operators_agree(
              o2, o1,
              "Blocks are incompatible. Expected `x` to have dimension"
              " {} but got {}.".format(
                  o1.domain_dimension, o2.range_dimension))
      with self._name_scope(name):  # pylint: disable=not-callable
        return self._linop_solve(left_operator, right_operator)
    # Tensor (possibly blockwise) path.
    with self._name_scope(name):  # pylint: disable=not-callable
      block_dimensions = (self._block_domain_dimensions() if adjoint
                          else self._block_range_dimensions())
      arg_dim = -1 if adjoint_arg else -2
      blockwise_arg = linear_operator_util.arg_is_blockwise(
          block_dimensions, rhs, arg_dim)
      if blockwise_arg:
        split_rhs = rhs
        for i, block in enumerate(split_rhs):
          if not isinstance(block, linear_operator.LinearOperator):
            block = ops.convert_to_tensor(block)
            # self._check_input_dtype(block)
            block_dimensions[i].assert_is_compatible_with(tensor_shape.TensorShape(block.shape)[arg_dim])
            split_rhs[i] = block
      else:
        rhs = ops.convert_to_tensor(
            rhs, name="rhs"
        )
        # self._check_input_dtype(rhs)
        op_dimension = (self.domain_dimension if adjoint
                        else self.range_dimension)
        op_dimension.assert_is_compatible_with(tensor_shape.TensorShape(rhs.shape)[arg_dim])
        split_dim = -1 if adjoint_arg else -2
        # Split input by rows normally, and otherwise columns.
        split_rhs = linear_operator_util.split_arg_into_blocks(
            self._block_domain_dimensions(),
            self._block_domain_dimension_tensors,
            rhs, axis=split_dim)
      # Solve each block system independently.
      solution_list = []
      for index, operator in enumerate(self.operators):
        solution_list = solution_list + [operator.solve(
            split_rhs[index], adjoint=adjoint, adjoint_arg=adjoint_arg)]
      if blockwise_arg:
        return solution_list
      # Re-assemble the block solutions into a single tensor.
      solution_list = linear_operator_util.broadcast_matrix_batch_dims(
          solution_list)
      return prefer_static.concat(solution_list, axis=-2)
  def solvevec(self, rhs, adjoint=False, name="solve"):
    """Solve single equation with best effort: `A X = rhs`.
    The returned `Tensor` will be close to an exact solution if `A` is well
    conditioned. Otherwise closeness will vary. See class docstring for details.
    Examples:
    ```python
    # Make an operator acting like batch matrix A. Assume tensor_shape.TensorShape(A.shape) = [..., M, N]
    operator = LinearOperator(...)
    tensor_shape.TensorShape(operator.shape) = [..., M, N]
    # Solve one linear system for every member of the batch.
    RHS = ... # shape [..., M]
    X = operator.solvevec(RHS)
    # X is the solution to the linear system
    # sum_j A[..., :, j] X[..., j] = RHS[..., :]
    operator.matvec(X)
    ==> RHS
    ```
    Args:
      rhs: `Tensor` with same `dtype` as this operator, or list of `Tensor`s
        (for blockwise operators). `Tensor`s are treated as [batch] vectors,
        meaning for every set of leading dimensions, the last dimension defines
        a vector. See class docstring for definition of compatibility regarding
        batch dimensions.
      adjoint: Python `bool`. If `True`, solve the system involving the adjoint
        of this `LinearOperator`: `A^H X = rhs`.
      name: A name scope to use for ops added by this method.
    Returns:
      `Tensor` with shape `[...,N]` and same `dtype` as `rhs`.
    Raises:
      NotImplementedError: If `self.is_non_singular` or `is_square` is False.
    """
    # NOTE(review): the default name is "solve" (not "solvevec") — this
    # mirrors the base-class convention; confirm before changing.
    with self._name_scope(name):  # pylint: disable=not-callable
      block_dimensions = (self._block_domain_dimensions() if adjoint
                          else self._block_range_dimensions())
      if linear_operator_util.arg_is_blockwise(block_dimensions, rhs, -1):
        for i, block in enumerate(rhs):
          if not isinstance(block, linear_operator.LinearOperator):
            block = ops.convert_to_tensor(block)
            # self._check_input_dtype(block)
            block_dimensions[i].assert_is_compatible_with(tensor_shape.TensorShape(block.shape)[-1])
            rhs[i] = block
        # Lift each vector block to a single-column matrix, reuse solve(),
        # then squeeze the column axis back off each solution block.
        rhs_mat = [array_ops.expand_dims(block, axis=-1) for block in rhs]
        solution_mat = self.solve(rhs_mat, adjoint=adjoint)
        return [array_ops.squeeze(x, axis=-1) for x in solution_mat]
      rhs = ops.convert_to_tensor(
          rhs, name="rhs"
      )
      # self._check_input_dtype(rhs)
      op_dimension = (self.domain_dimension if adjoint
                      else self.range_dimension)
      op_dimension.assert_is_compatible_with(tensor_shape.TensorShape(rhs.shape)[-1])
      # Non-blockwise case: same lift-to-matrix / squeeze trick.
      rhs_mat = array_ops.expand_dims(rhs, axis=-1)
      solution_mat = self.solve(rhs_mat, adjoint=adjoint)
      return array_ops.squeeze(solution_mat, axis=-1)
  def _diag_part(self):
    """Diagonal of the full operator: concatenation of the blocks' diagonals."""
    if not all(operator.is_square for operator in self.operators):
      raise NotImplementedError(
          "`diag_part` not implemented for an operator whose blocks are not "
          "square.")
    diag_list = []
    for operator in self.operators:
      # Extend the axis for broadcasting.
      diag_list = diag_list + [operator.diag_part()[..., _ops.newaxis]]
    # Broadcast batch dims across blocks, concatenate, then drop the
    # broadcasting axis added above.
    diag_list = linear_operator_util.broadcast_matrix_batch_dims(diag_list)
    diagonal = prefer_static.concat(diag_list, axis=-2)
    return array_ops.squeeze(diagonal, axis=-1)
def _trace(self):
if not all(operator.is_square for operator in self.operators):
raise NotImplementedError(
"`trace` not implemented for an operator whose blocks are not "
"square.")
result = self.operators[0].trace()
for operator in self.operators[1:]:
result = result + operator.trace()
return result
  def _to_dense(self):
    """Materialize the block-diagonal matrix as one dense tensor.

    Each block is left/right padded with zeros so that, stacked row-wise,
    the blocks land on the diagonal.
    """
    num_cols = 0  # running column offset of the current block
    rows = []
    broadcasted_blocks = [operator.to_dense() for operator in self.operators]
    broadcasted_blocks = linear_operator_util.broadcast_matrix_batch_dims(
        broadcasted_blocks)
    for block in broadcasted_blocks:
      batch_row_shape = prefer_static.shape(block)[:-1]
      # Zeros to the left of this block (columns of all previous blocks).
      zeros_to_pad_before_shape = prefer_static.concat(
          [batch_row_shape, [num_cols]], axis=-1)
      zeros_to_pad_before = array_ops.zeros(
          shape=zeros_to_pad_before_shape, dtype=block.dtype)
      num_cols = num_cols + prefer_static.shape(block)[-1]
      # Zeros to the right of this block (columns of all following blocks).
      zeros_to_pad_after_shape = prefer_static.concat(
          [batch_row_shape,
           [self.domain_dimension_tensor() - num_cols]], axis=-1)
      zeros_to_pad_after = array_ops.zeros(
          shape=zeros_to_pad_after_shape, dtype=block.dtype)
      rows.append(prefer_static.concat(
          [zeros_to_pad_before, block, zeros_to_pad_after], axis=-1))
    mat = prefer_static.concat(rows, axis=-2)
    # Restore any static shape information known for the whole operator.
    tensorshape_util.set_shape(mat, tensor_shape.TensorShape(self.shape))
    return mat
def _assert_non_singular(self):
return control_flow_ops.group([
operator.assert_non_singular() for operator in self.operators])
def _assert_self_adjoint(self):
return control_flow_ops.group([
operator.assert_self_adjoint() for operator in self.operators])
def _assert_positive_definite(self):
return control_flow_ops.group([
operator.assert_positive_definite() for operator in self.operators])
  def _eigvals(self):
    """Eigenvalues of the full operator: concatenation of block eigenvalues."""
    if not all(operator.is_square for operator in self.operators):
      raise NotImplementedError(
          "`eigvals` not implemented for an operator whose blocks are not "
          "square.")
    eig_list = []
    for operator in self.operators:
      # Extend the axis for broadcasting.
      eig_list = eig_list + [operator.eigvals()[..., _ops.newaxis]]
    # Broadcast batch dims across blocks, concatenate, then drop the
    # broadcasting axis added above.
    eig_list = linear_operator_util.broadcast_matrix_batch_dims(eig_list)
    eigs = prefer_static.concat(eig_list, axis=-2)
    return array_ops.squeeze(eigs, axis=-1)
  @property
  def _composite_tensor_fields(self):
    # Constructor kwargs the CompositeTensor machinery needs to rebuild
    # this operator.
    return ("operators",)
  @property
  def _experimental_parameter_ndims_to_matrix_ndims(self):
    # NOTE(review): zeros appear to mean each entry of `operators` carries no
    # extra parameter dims per matrix dim — confirm against the base-class
    # contract for this experimental property.
    return {"operators": [0] * len(self.operators)}
import numpy as np
from tensorflow_probability.python.internal.backend.numpy import linalg_impl as _linalg
from tensorflow_probability.python.internal.backend.numpy import ops as _ops
from tensorflow_probability.python.internal.backend.numpy.gen import tensor_shape
from tensorflow_probability.python.internal.backend.numpy import private
distribution_util = private.LazyLoader(
"distribution_util", globals(),
"tensorflow_probability.substrates.numpy.internal.distribution_util")
tensorshape_util = private.LazyLoader(
"tensorshape_util", globals(),
"tensorflow_probability.substrates.numpy.internal.tensorshape_util")
prefer_static = private.LazyLoader(
"prefer_static", globals(),
"tensorflow_probability.substrates.numpy.internal.prefer_static")
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
05a0585efd4927df716a9fdb803cee546318b943 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-5285.py | 9721e555b61a9d96d0739af17c132b22b60e4b6d | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,288 | py | # Binary-search trees
# Node of an int-valued binary search tree (ChocoPy-style annotations).
class TreeNode(object):
    value:int = 0
    left:"TreeNode" = None
    right:"TreeNode" = None
    # Insert x into the subtree rooted here; returns True iff x was new.
    def insert(self:"TreeNode", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode(x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode(x)
                return True
            else:
                return self.right.insert(x)
        # x == self.value: duplicate, tree unchanged.
        return False
    # Standard BST membership test.
    def contains(self:"TreeNode", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
# Machine-generated variant of TreeNode with duplicated fields/methods
# (suffix "2"). The *2 methods ignore their extra arguments and behave
# exactly like the base versions — presumably a benchmark-generator
# artifact, not a real API.
class TreeNode2(object):
    value:int = 0
    value2:int = 0
    left:"TreeNode2" = None
    left2:"TreeNode2" = None
    right:"TreeNode2" = None
    right2:"TreeNode2" = None
    def insert(self:"TreeNode2", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode2(x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    # Same as insert(); x2 is unused.
    def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode2(x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def contains(self:"TreeNode2", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    # Same as contains(); x2 is unused.
    def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
# Machine-generated variant of TreeNode with triplicated fields/methods.
# All insertN/containsN ignore their extra arguments.
class TreeNode3(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    left:"TreeNode3" = None
    left2:"TreeNode3" = None
    left3:"TreeNode3" = None
    right:"TreeNode3" = None
    right2:"TreeNode3" = None
    right3:"TreeNode3" = None
    def insert(self:"TreeNode3", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def contains(self:"TreeNode3", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
# Machine-generated variant of TreeNode with quadruplicated fields/methods.
# All insertN/containsN ignore their extra arguments.
class TreeNode4(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    left:"TreeNode4" = None
    left2:"TreeNode4" = None
    left3:"TreeNode4" = None
    left4:"TreeNode4" = None
    right:"TreeNode4" = None
    right2:"TreeNode4" = None
    right3:"TreeNode4" = None
    right4:"TreeNode4" = None
    def insert(self:"TreeNode4", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def contains(self:"TreeNode4", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
# Machine-generated variant of TreeNode with quintuplicated fields/methods.
# All insertN/containsN ignore their extra arguments.
class TreeNode5(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    value5:int = 0
    left:"TreeNode5" = None
    left2:"TreeNode5" = None
    left3:"TreeNode5" = None
    left4:"TreeNode5" = None
    left5:"TreeNode5" = None
    right:"TreeNode5" = None
    right2:"TreeNode5" = None
    right3:"TreeNode5" = None
    right4:"TreeNode5" = None
    right5:"TreeNode5" = None
    def insert(self:"TreeNode5", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False
    def contains(self:"TreeNode5", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
    def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True
# BST wrapper: tracks the root node and the number of distinct values.
class Tree(object):
    root:TreeNode = None
    size:int = 0
    # Insert x; size grows only when x was not already present.
    def insert(self:"Tree", x:int) -> object:
        if self.root is None:
            self.root = makeNode(x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    # Membership test; empty tree contains nothing.
    def contains(self:"Tree", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
# Machine-generated variant of Tree with duplicated fields/methods; the *2
# methods ignore their extra arguments.
# Fix: `contains` declared its receiver as `$ID` (a leftover template
# placeholder) instead of `self`, which is a syntax error; restored `self`
# to match every sibling class.
class Tree2(object):
    root:TreeNode2 = None
    root2:TreeNode2 = None
    size:int = 0
    size2:int = 0
    def insert(self:"Tree2", x:int) -> object:
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    # Same as insert(); x2 is unused.
    def insert2(self:"Tree2", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def contains(self:"Tree2", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    # Same as contains(); x2 is unused.
    def contains2(self:"Tree2", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
# Machine-generated variant of Tree with triplicated fields/methods; the
# insertN/containsN methods ignore their extra arguments.
class Tree3(object):
    root:TreeNode3 = None
    root2:TreeNode3 = None
    root3:TreeNode3 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    def insert(self:"Tree3", x:int) -> object:
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert2(self:"Tree3", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def contains(self:"Tree3", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains2(self:"Tree3", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
# Machine-generated variant of Tree with quadruplicated fields/methods; the
# insertN/containsN methods ignore their extra arguments.
class Tree4(object):
    root:TreeNode4 = None
    root2:TreeNode4 = None
    root3:TreeNode4 = None
    root4:TreeNode4 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    def insert(self:"Tree4", x:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert2(self:"Tree4", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def contains(self:"Tree4", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains2(self:"Tree4", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
# Machine-generated variant of Tree with quintuplicated fields/methods; the
# insertN/containsN methods ignore their extra arguments.
class Tree5(object):
    root:TreeNode5 = None
    root2:TreeNode5 = None
    root3:TreeNode5 = None
    root4:TreeNode5 = None
    root5:TreeNode5 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    size5:int = 0
    def insert(self:"Tree5", x:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert2(self:"Tree5", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def contains(self:"Tree5", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains2(self:"Tree5", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
# Construct a fresh leaf TreeNode holding x (children stay None).
def makeNode(x: int) -> TreeNode:
    node:TreeNode = None
    node = TreeNode()
    node.value = x
    return node
# Construct a fresh leaf TreeNode2 holding x.
# NOTE(review): x2 and local b2 are unused and value2 stays 0 — appears to
# be a generator artifact; confirm before "fixing".
def makeNode2(x: int, x2: int) -> TreeNode2:
    b:TreeNode2 = None
    b2:TreeNode2 = None
    b = TreeNode2()
    b.value = x
    return b
# Construct a fresh leaf TreeNode3 holding x.
# NOTE(review): x2/x3 and locals b2/b3 are unused — generator artifact.
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
    b:TreeNode3 = None
    b2:TreeNode3 = None
    b3:TreeNode3 = None
    b = TreeNode3()
    b.value = x
    return b
# Construct a fresh leaf TreeNode4 holding x.
# NOTE(review): x2..x4 and locals b2..b4 are unused — generator artifact.
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
    b:TreeNode4 = None
    b2:TreeNode4 = None
    b3:TreeNode4 = None
    b4:TreeNode4 = None
    b = TreeNode4()
    b.value = x
    return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
    # Allocate a TreeNode5 holding `x`; extra parameters/locals are dead filler.
    b:TreeNode5 = None
    b2:TreeNode5 = None
    b3:TreeNode5 = None
    b4:TreeNode5 = None
    b5:TreeNode5 = None
    b = TreeNode5()
    b.value = x
    return b
# Input parameters
# n: number of insertions; c: every c-th counter value is skipped below.
# The *2..*5 duplicates are unused (generated benchmark padding).
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
    t.insert(k)
    # Multiplicative congruential step: pseudo-random keys modulo 37831.
    k = (k * 37813) % 37831
    if i % c != 0:
        t.insert(i)
    i = i + 1
print(t.size)
# Probe a fixed set of keys and print the ones present in the tree.
for i in [4, 8, 15, 16, 23, 42]:
    if t.contains(i):
        print(i)
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
260778a99413ef35e79f6adf633bb47c165b2e41 | 2dc8ac19e5f6f5fb8638bbdd1917a15094f3431b | /correlation discovery/combine_Lynkwifi_and_col_bus_sub_manhattan_each_injured_each_killed/map.py | 1723b4d92d67bad3c40c014d5a2c07ff87df6afb | [] | no_license | rgc292/Capstone_Project | 8182f2d8143e50db2654da343bd82ae66e74b1c3 | 35b16193f2363277fdf691dced704f56da1f8331 | refs/heads/master | 2020-07-01T04:57:03.365113 | 2016-12-20T01:51:42 | 2016-12-20T01:51:42 | 74,094,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,401 | py | #!/usr/bin/python
import sys
import numpy as np
import csv
import StringIO
# Parallel arrays of parsed collision records (13-column rows): index i of
# each list refers to the same record.
col_bus_sub_year = []
col_bus_sub_month = []
col_bus_sub_day = []
col_bus_sub_lat = []
col_bus_sub_lon = []
# Parallel arrays of parsed LinkNYC wifi records (5-column rows).
wifi_year = []
wifi_month = []
wifi_day = []
wifi_lat = []
wifi_lon = []
# Raw collision rows (joined back to CSV) for re-emission in the output.
col_bus_sub = []
# Index into the collision arrays; starts at -1 and is advanced per record.
counter = -1
def dist(coord1, coord2, _type):
    """Approximate distance in miles between two coordinates that differ
    along a single axis.

    ``_type`` selects the axis ('lat' or 'lon'); any other value yields
    ``None``. Uses the haversine arc on a 6371 km sphere converted to miles
    (km / 1.6); longitude results are further divided by a fixed calibration
    constant (presumably tuned for NYC's latitude -- TODO confirm).
    """
    if _type != 'lat' and _type != 'lon':
        return None
    # With a delta along only one axis, the haversine cross-term vanishes,
    # so the formula collapses to sin^2(delta / 2) on that axis.
    half_delta = np.deg2rad(coord2 - coord1) / 2.0
    hav = np.sin(half_delta) ** 2
    arc = 2.0 * np.arctan2(hav ** .5, (1.0 - hav) ** .5)
    miles = (6371.0 * arc) / 1.6
    if _type == 'lon':
        miles = miles / 1.311260927412249
    return miles
# input comes from STDIN (stream data that goes to the program)
# Phase 1: classify each CSV line by column count -- 5 columns = wifi record,
# 13 columns = collision record -- and buffer the fields in parallel arrays.
for line in sys.stdin:
    try:
        #Fill in your map code here. To write to output file, use "print"
        # remove leading and trailing whitespace
        csv_file = StringIO.StringIO(line)
        csv_reader = csv.reader(csv_file)
    except:
        pass
    # NOTE(review): if the try above ever fails on the first line,
    # csv_reader is unbound here -- verify inputs are always parseable.
    for l in csv_reader:
        if len(l) == 5:
            # Wifi row: year, month, day, lat, lon; skip blank/zero coords.
            if (l[0] == '' or l[1] == '' or l[2] == '' or l[3] == '' or l[3] == '0' or l[4] == '' or l[4] == '0'):
                pass
            else:
                wifi_year.append(l[0])
                wifi_month.append(l[1])
                wifi_day.append(l[2])
                wifi_lat.append(l[3])
                wifi_lon.append(l[4])
        elif (len(l) == 13):
            # Collision row: date in col 0 ('YYYY-MM-DD,...'), coords in 9/10.
            if (l[9] == '' or l[10] == '' or l[9] == '0' or l[10] == '0'):
                pass
            else:
                col_bus_sub_year.append(l[0].split(',')[0].split('-')[0])
                col_bus_sub_month.append(l[0].split(',')[0].split('-')[1])
                col_bus_sub_day.append(l[0].split(',')[0].split('-')[2])
                col_bus_sub_lat.append(l[9])
                col_bus_sub_lon.append(l[10])
                col_bus_sub.append(','.join(map(str, l)).strip())
        else:
            pass
# Phase 2: for every collision, flag it (1) if any wifi kiosk lies within the
# distance threshold AND was installed strictly before the collision date.
flag = 0
for lat_col_bus_sub, lon_col_bus_sub in zip(col_bus_sub_lat,col_bus_sub_lon):
    wifi_counter = -1
    counter += 1
    flag = 0
    for lat_wifi, lon_wifi in zip(wifi_lat,wifi_lon):
        wifi_counter += 1
        # Manhattan-style sum of per-axis great-circle distances (miles).
        lat_d = dist(float(lat_col_bus_sub),float(lat_wifi),'lat')
        lon_d = dist(float(lon_col_bus_sub),float(lon_wifi),'lon')
        distance = lat_d + lon_d
        if (distance <= 0.031): # 0.015625
            # Compare dates field-by-field: collision after wifi install?
            if (int(col_bus_sub_year[counter]) > int(wifi_year[wifi_counter])):
                flag = 1
            elif (int(col_bus_sub_year[counter]) == int(wifi_year[wifi_counter])):
                if (int(col_bus_sub_month[counter]) > int(wifi_month[wifi_counter])):
                    print '%s' %('ok')
                    flag = 1
                elif (int(col_bus_sub_month[counter]) == int(wifi_month[wifi_counter])):
                    if (int(col_bus_sub_day[counter]) > int(wifi_day[wifi_counter])):
                        flag = 1
                    else:
                        pass
                else:
                    pass
            else:
                pass
        else:
            pass
    # Emit the raw collision row tagged with its wifi-proximity flag.
    if flag == 1:
        print '%s,%d' %(col_bus_sub[counter],1)
    elif flag == 0:
        print '%s,%d' %(col_bus_sub[counter],0)
    else:
        pass
"Rafa@192.168.1.139"
] | Rafa@192.168.1.139 |
9cfa025c1c6cdcd1e98a7044b1aaa4b444395e64 | 8a7c56ea3eb73518cdf8d898f6a6f5883b105ec7 | /src/trace_msg_bfms/trace_msg_bfm.py | 548bfed2f68f6a15d64ab0b742786b1635c0a1a2 | [
"Apache-2.0"
] | permissive | pybfms/pybfms-trace-msg | cb8b5017af20ce52697d960f8d48574370459416 | 86eb7f7530a04c50ed79c88cb5ae452983dc31ed | refs/heads/main | 2023-06-08T08:37:53.333590 | 2021-06-28T01:47:18 | 2021-06-28T01:47:18 | 368,890,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py |
import pybfms
@pybfms.bfm(hdl={
pybfms.BfmType.Verilog : pybfms.bfm_hdl_path(__file__, "hdl/trace_msg_bfm.v"),
pybfms.BfmType.SystemVerilog : pybfms.bfm_hdl_path(__file__, "hdl/trace_msg_bfm.v"),
}, has_init=True)
class TraceMsgBfm():
def __init__(self):
self.busy = pybfms.lock()
self.is_reset = False
self.reset_ev = pybfms.event()
self.msg_sz = 0
pass
@pybfms.export_task(pybfms.uint32_t)
def _set_parameters(self, msg_sz):
print("TraceBFM: msg_sz=%d" % msg_sz)
self.msg_sz = msg_sz
@pybfms.export_task()
def _reset(self):
self.is_reset = True
self.reset_ev.set()
def set_msg(self, idx, msg):
self._set_msg(idx, msg)
def clr_msg(self, idx):
self._clr_msg(idx)
def _set_msg(self, idx, msg):
self._clr_msg(idx)
if len(msg) > self.msg_sz:
msg = msg[0:self.msg_sz-3]
msg += "..."
for i,c in enumerate(msg.encode()):
self._set_msg_c(idx, i, c)
@pybfms.import_task(pybfms.uint8_t,pybfms.uint8_t,pybfms.uint8_t)
def _set_msg_c(self, msg, idx, c):
pass
@pybfms.import_task(pybfms.uint8_t)
def _clr_msg(self, idx):
pass
| [
"matt.ballance@gmail.com"
] | matt.ballance@gmail.com |
f9ef15b9bfd358fbcf78dc3c8a2c94f18e736c13 | b61573aeb976040f0b1ba67900ec28b14a2652dc | /torchaudio/functional/__init__.py | bf27168cbbd127df9a24eecd273c32234c1e630d | [
"BSD-2-Clause"
] | permissive | TrendingTechnology/audio | a84c7408f78a20cf6cf5456e2f12b284491a28ce | 2aad928903f2f0f9a05af9a68ac2ed203faf1093 | refs/heads/main | 2023-07-08T03:48:08.350672 | 2021-08-05T17:52:02 | 2021-08-05T17:52:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,820 | py | from .functional import (
amplitude_to_DB,
angle,
complex_norm,
compute_deltas,
compute_kaldi_pitch,
create_dct,
create_fb_matrix,
melscale_fbanks,
linear_fbanks,
DB_to_amplitude,
detect_pitch_frequency,
griffinlim,
magphase,
mask_along_axis,
mask_along_axis_iid,
mu_law_encoding,
mu_law_decoding,
phase_vocoder,
sliding_window_cmn,
spectrogram,
spectral_centroid,
apply_codec,
resample,
edit_distance,
pitch_shift,
)
from .filtering import (
allpass_biquad,
band_biquad,
bandpass_biquad,
bandreject_biquad,
bass_biquad,
biquad,
contrast,
dither,
dcshift,
deemph_biquad,
equalizer_biquad,
flanger,
gain,
highpass_biquad,
lfilter,
lowpass_biquad,
overdrive,
phaser,
riaa_biquad,
treble_biquad,
vad,
)
# Public functional API re-exported from .functional and .filtering above.
__all__ = [
    'amplitude_to_DB',
    'angle',
    'complex_norm',
    'compute_deltas',
    'compute_kaldi_pitch',
    'create_dct',
    'create_fb_matrix',
    'melscale_fbanks',
    'linear_fbanks',
    'DB_to_amplitude',
    'detect_pitch_frequency',
    'griffinlim',
    'magphase',
    'mask_along_axis',
    'mask_along_axis_iid',
    'mu_law_encoding',
    'mu_law_decoding',
    'phase_vocoder',
    'sliding_window_cmn',
    'spectrogram',
    'spectral_centroid',
    'allpass_biquad',
    'band_biquad',
    'bandpass_biquad',
    'bandreject_biquad',
    'bass_biquad',
    'biquad',
    'contrast',
    'dither',
    'dcshift',
    'deemph_biquad',
    'equalizer_biquad',
    'flanger',
    'gain',
    'highpass_biquad',
    'lfilter',
    'lowpass_biquad',
    'overdrive',
    'phaser',
    'riaa_biquad',
    'treble_biquad',
    'vad',
    'apply_codec',
    'resample',
    'edit_distance',
    'pitch_shift',
]
| [
"noreply@github.com"
] | TrendingTechnology.noreply@github.com |
6753a026018172376dbb6693b26fef5fc2ed39e7 | 9142c3ebb20bdeab4d2b7e8e70ab562ce65cfe59 | /thermosteam/utils/decorators/units_of_measure.py | f631bb7d39c4e3b85727446e02a6a19a0ae8d2bf | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"NCSA"
] | permissive | BioSTEAMDevelopmentGroup/thermosteam | ce97fe2e5e2a5d21a9715435f653e3ee7e706e00 | 934e99441acfdb89d72dc99fee7b9c6def5aef6f | refs/heads/master | 2023-08-08T00:17:47.366975 | 2023-07-28T05:28:31 | 2023-07-28T05:28:31 | 219,133,879 | 46 | 14 | NOASSERTION | 2023-02-16T19:44:16 | 2019-11-02T09:50:30 | Python | UTF-8 | Python | false | false | 2,204 | py | # -*- coding: utf-8 -*-
# BioSTEAM: The Biorefinery Simulation and Techno-Economic Analysis Modules
# Copyright (C) 2020-2023, Yoel Cortes-Pena <yoelcortes@gmail.com>
#
# This module is under the UIUC open-source license. See
# github.com/BioSTEAMDevelopmentGroup/biosteam/blob/master/LICENSE.txt
# for license details.
"""
"""
import thermosteam as tmo
from typing import Optional
__all__ = ('units_of_measure',)
def units_of_measure(dct, cls=None):
    """Class decorator attaching a units-of-measure registry to `cls`.

    When called without a class (``@units_of_measure(dct)``), returns a
    decorator; when given a class, installs `dct` as ``_units_of_measure``
    along with the module-level property accessor methods and returns the
    class unchanged otherwise.
    """
    if cls is None:
        def decorator(inner_cls):
            return units_of_measure(dct, inner_cls)
        return decorator
    cls._units_of_measure = dct
    cls.define_property = define_property
    cls.get_property = get_property
    cls.set_property = set_property
    return cls
@classmethod
def define_property(cls, name, units, fget, fset=None):
    # Register `name` with its absolute units-of-measure and expose it as a
    # (read-only unless fset is given) property on the decorated class.
    # NOTE(review): the units entry is recorded *before* the name-collision
    # check, so a rejected name still leaves a stale registry entry -- verify
    # that this is intended.
    cls._units_of_measure[name] = tmo.units_of_measure.AbsoluteUnitsOfMeasure(units)
    if hasattr(cls, name): raise ValueError(f"property with name '{name}' already exists")
    setattr(cls, name, property(fget, fset))
def get_property(self, name: str, units: Optional[str]=None):
    """
    Return property in requested units.

    Parameters
    ----------
    name :
        Name of property.
    units :
        Units of measure. Defaults to the property's original units of measure.

    """
    value = getattr(self, name)
    # Without a target unit, hand back the raw attribute value.
    if units is None:
        return value
    units_map = self._units_of_measure
    if name not in units_map:
        raise ValueError(f"'{name}' is not a property")
    # Convert from the property's registered units to the requested ones.
    return units_map[name].convert(value, units)
def set_property(self, name: str, value: float, units: Optional[str]=None):
    """
    Set property in given units.

    Parameters
    ----------
    name :
        Name of property.
    value :
        New value of property.
    units :
        Units of measure.

    """
    registry = self._units_of_measure
    if name not in registry:
        raise ValueError(f"no property with name '{name}'")
    # Translate the incoming value back to the property's registered units.
    if units is not None:
        value = registry[name].unconvert(value, units)
    setattr(self, name, value)
| [
"yoelcortes@gmail.com"
] | yoelcortes@gmail.com |
bef2bbff53a217bbd10fec6672006ab131eda1d4 | 26bd175ffb3bd204db5bcb70eec2e3dfd55fbe9f | /exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/web_infrastructure/sophos_utm/utm_network_interface_address_info.py | dc68ac3521998969963c0e69734771dd04f673e6 | [
"GPL-3.0-only",
"MIT",
"GPL-3.0-or-later",
"CC0-1.0",
"GPL-1.0-or-later"
] | permissive | tr3ck3r/linklight | 37814ed19173d893cdff161355d70a1cf538239b | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | refs/heads/master | 2021-04-11T04:33:02.727318 | 2020-03-25T17:38:41 | 2020-03-25T17:38:41 | 248,992,437 | 0 | 0 | MIT | 2020-03-21T14:26:25 | 2020-03-21T14:26:25 | null | UTF-8 | Python | false | false | 2,624 | py | #!/usr/bin/python
# Copyright: (c) 2018, Juergen Wiebe <wiebe@e-spirit.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: utm_network_interface_address_info
author:
- Juergen Wiebe (@steamx)
short_description: Get info for a network/interface_address object
description:
- Get info for a network/interface_address object in SOPHOS UTM.
options:
name:
description:
- The name of the object. Will be used to identify the entry
required: true
extends_documentation_fragment:
- community.general.utm
'''
EXAMPLES = """
- name: utm network interface address
utm_proxy_interface_address_info:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestNetworkInterfaceAddress
"""
RETURN = """
result:
description: The utm object that was created
returned: success
type: complex
contains:
_ref:
description: The reference name of the object
type: str
_locked:
description: Whether or not the object is currently locked
type: bool
_type:
description: The type of the object
type: str
name:
description: The name of the object
type: str
address:
description: The ip4 address of the network/interface_address object
type: str
address6:
description: The ip6 address of the network/interface_address object
type: str
comment:
description: The comment string
type: str
resolved:
description: Whether or not the object is resolved
type: bool
resolved6:
description: Whether or not the object is resolved
type: bool
"""
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
from ansible.module_utils._text import to_native
def main():
    """Module entry point: query the UTM network/interface_address endpoint
    for the named object (read-only: no change-detection keys)."""
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True)
        )
    )
    try:
        UTM(module, "network/interface_address", [], info_only=True).execute()
    except Exception as e:
        # Surface any REST/connection failure as a module failure message.
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
| [
"joshuamadison+gh@gmail.com"
] | joshuamadison+gh@gmail.com |
f0bc48de184d0f592a8679fb2cd6a0b661390c25 | 7464f02b4acbf9a34b46ea0411afd934e2d21997 | /tensorflow_probability/python/distributions/half_student_t.py | efec4bc9e14ae6785f8e920046c2fa77c1224591 | [
"Apache-2.0"
] | permissive | mabu-dev/probability | c2aaa353b56a5b1a73395b4187d82809615eb9c7 | b3d884dc119a2349a8e087d2deaed162f6ec945c | refs/heads/master | 2022-04-22T13:11:08.751328 | 2020-04-11T10:46:00 | 2020-04-11T10:48:11 | 255,320,504 | 1 | 0 | Apache-2.0 | 2020-04-13T12:31:03 | 2020-04-13T12:31:02 | null | UTF-8 | Python | false | false | 13,350 | py | # Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Half-Student's T Distribution Class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.bijectors import chain as chain_bijector
from tensorflow_probability.python.bijectors import exp as exp_bijector
from tensorflow_probability.python.bijectors import shift as shift_bijector
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import student_t
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import prefer_static
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
# Public symbols exported by this module.
__all__ = [
    'HalfStudentT',
]
class HalfStudentT(distribution.Distribution):
  """Half-Student's t distribution.

  The half-Student's t distribution has three parameters: degree of freedom
  `df`, location `loc`, and scale `scale`. It represents the right half of the
  two symmetric halves in a [Student's t
  distribution](https://en.wikipedia.org/wiki/Student%27s_t-distribution).

  #### Mathematical Details
  The probability density function (pdf) for the half-Student's t distribution
  is given by

  ```none
  pdf(x; df, loc, scale) = (1 + y**2 / df)**(-0.5 (df + 1)) / Z,
  where
  y = (x - loc) / scale
  Z = 2 * scale * sqrt(df * pi) * gamma(0.5 * df) / gamma(0.5 * (df + 1))
  ```

  where:

  * `df` is a positive scalar in `R`,
  * `loc` is a scalar in `R`,
  * `scale` is a positive scalar in `R`,
  * `Z` is the normalization constant, and
  * `Gamma` is the [gamma function](
    https://en.wikipedia.org/wiki/Gamma_function).

  The support of the distribution is given by the interval `[loc, infinity)`.

  Samples of this distribution are reparameterized (pathwise differentiable).
  The derivatives are computed using the approach described in the paper

  [Michael Figurnov, Shakir Mohamed, Andriy Mnih.
  Implicit Reparameterization Gradients, 2018](https://arxiv.org/abs/1805.08498)

  #### Examples

  ```python
  import tensorflow_probability as tfp
  tfd = tfp.distributions

  # Define a single scalar Student t distribution.
  single_dist = tfd.HalfStudentT(df=3, loc=0, scale=1)

  # Evaluate the pdf at 1, returning a scalar Tensor.
  single_dist.prob(1.)

  # Define a batch of two scalar valued half Student t's.
  # The first has degrees of freedom 2, mean 1, and scale 11.
  # The second 3, 2 and 22.
  multi_dist = tfd.HalfStudentT(df=[2, 3], loc=[1, 2], scale=[11, 22])

  # Evaluate the pdf of the first distribution at 1.5, and the second on 2.5,
  # returning a length two tensor.
  multi_dist.prob([1.5, 2.5])

  # Get 3 samples, returning a 3 x 2 tensor.
  multi_dist.sample(3)
  ```

  Arguments are broadcast when possible.

  ```python
  # Define a batch of two half Student's t distributions.
  # Both have df 2 and mean 1, but different scales.
  dist = tfd.HalfStudentT(df=2, loc=1, scale=[11, 22.])

  # Evaluate the pdf of both distributions on the same point, 3.0,
  # returning a length 2 tensor.
  dist.prob(3.0)
  ```

  Compute the gradients of samples w.r.t. the parameters via implicit
  reparameterization through the gamma:

  ```python
  df = tf.constant(2.0)
  loc = tf.constant(2.0)
  scale = tf.constant(11.0)
  dist = tfd.HalfStudentT(df=df, loc=loc, scale=scale)
  with tf.GradientTape() as tape:
    tape.watch((df, loc, scale))
    loss = tf.reduce_mean(dist.sample(5))
  # Unbiased stochastic gradients of the loss function
  grads = tape.gradient(loss, (df, loc, scale))
  ```

  """

  def __init__(self,
               df,
               loc,
               scale,
               validate_args=False,
               allow_nan_stats=True,
               name='HalfStudentT'):
    """Construct a half-Student's t distribution.

    Args:
      df: Floating-point `Tensor`. The degrees of freedom of the
        distribution(s). `df` must contain only positive values.
      loc: Floating-point `Tensor`; the location(s) of the distribution(s).
      scale: Floating-point `Tensor`; the scale(s) of the distribution(s). Must
        contain only positive values.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs. Default value: `False` (i.e. do not validate args).
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or more
        of the statistic's batch members are undefined.
        Default value: `True`.
      name: Python `str` name prefixed to Ops created by this class.
        Default value: 'HalfStudentT'.

    Raises:
      TypeError: if `loc` and `scale` have different `dtype`.
    """
    parameters = dict(locals())
    with tf.name_scope(name) as name:
      # Parameters are kept as non-ref tensors so tf.Variable inputs stay
      # live (re-read on each use) rather than being frozen at init time.
      dtype = dtype_util.common_dtype([df, loc, scale], dtype_hint=tf.float32)
      self._df = tensor_util.convert_nonref_to_tensor(
          df, name='df', dtype=dtype)
      self._loc = tensor_util.convert_nonref_to_tensor(
          loc, name='loc', dtype=dtype)
      self._scale = tensor_util.convert_nonref_to_tensor(
          scale, name='scale', dtype=dtype)
      dtype_util.assert_same_float_dtype((self._df, self._loc, self._scale))
      super(HalfStudentT, self).__init__(
          dtype=dtype,
          reparameterization_type=reparameterization.FULLY_REPARAMETERIZED,
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          name=name)

  @staticmethod
  def _param_shapes(sample_shape):
    # All three parameters are scalar per-event, so they share `sample_shape`.
    return dict(
        zip(('df', 'loc', 'scale'),
            ([tf.convert_to_tensor(sample_shape, dtype=tf.int32)] * 3)))

  @classmethod
  def _params_event_ndims(cls):
    return dict(df=0, loc=0, scale=0)

  @property
  def df(self):
    """Distribution parameter for the degrees of freedom."""
    return self._df

  @property
  def loc(self):
    """Distribution parameter for the location."""
    return self._loc

  @property
  def scale(self):
    """Distribution parameter for the scale."""
    return self._scale

  def _batch_shape_tensor(self, df=None, loc=None, scale=None):
    return prefer_static.broadcast_shape(
        prefer_static.shape(self.df if df is None else df),
        prefer_static.broadcast_shape(
            prefer_static.shape(self.loc if loc is None else loc),
            prefer_static.shape(self.scale if scale is None else scale)))

  def _batch_shape(self):
    return tf.broadcast_static_shape(
        tf.broadcast_static_shape(self.df.shape, self.loc.shape),
        self.scale.shape)

  def _event_shape_tensor(self):
    return tf.constant([], dtype=tf.int32)

  def _event_shape(self):
    return tf.TensorShape([])

  def _sample_n(self, n, seed=None):
    # Draw from a zero-centered Student's t with the same df/scale, fold the
    # samples onto the positive half-line with abs, then shift by loc.
    df = tf.convert_to_tensor(self.df)
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    batch_shape = self._batch_shape_tensor(df=df, loc=loc, scale=scale)
    samples = student_t.sample_n(
        n,
        df=df,
        loc=tf.zeros_like(loc),
        scale=scale,
        batch_shape=batch_shape,
        dtype=self.dtype,
        seed=seed)
    return tf.math.abs(samples) + self.loc

  def _log_prob(self, x):
    df = tf.convert_to_tensor(self.df)
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    # Substitute an in-support dummy value below `loc` so the StudentT
    # log-prob (and its gradient) never sees an out-of-support input.
    safe_x = tf.where(x < loc, 0.5 * scale + loc, x)  # avoid NaNs below
    # Where defined, log prob is twice StudentT log prob.
    log_prob = student_t.log_prob(
        safe_x, df=df, loc=loc, scale=scale) + np.log(2.)
    # Outside the support [loc, inf), the log density is -inf.
    return tf.where(x < loc,
                    dtype_util.as_numpy_dtype(self.dtype)(-np.inf), log_prob)

  def _cdf(self, x):
    # If F(t) is the cdf of a symmetric f,
    # 2 * F(t) - 1 is the cdf of abs(f) for t > loc
    df = tf.convert_to_tensor(self.df)
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    safe_x = tf.where(x < loc, 0.5 * scale + loc, x)
    cdf = student_t.cdf(safe_x, df, loc, scale)
    return tf.where(x < loc,
                    dtype_util.as_numpy_dtype(self.dtype)(0.),
                    2. * cdf - 1)

  def _entropy(self):
    # Symmetric half-P entropy is
    # entropy(P) - log(2)
    df = tf.convert_to_tensor(self.df)
    scale = tf.convert_to_tensor(self.scale)
    batch_shape = self._batch_shape_tensor(df=df, scale=scale)
    return student_t.entropy(df, scale, batch_shape, self.dtype) - np.log(2.)

  @distribution_util.AppendDocstring(
      """The mean of a half-Student's t is defined if `df > 1`, otherwise it is
      `NaN`. If `self.allow_nan_stats=False`, then an exception will be raised
      rather than returning `NaN`.""")
  def _mean(self):
    df = tf.convert_to_tensor(self.df)
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    # log of E[X - loc] = 2 * scale * sqrt(df / pi)
    #                     * Gamma((df + 1) / 2) / (Gamma(df / 2) * (df - 1)),
    # accumulated in log space for numerical stability.
    log_correction = (
        tf.math.log(scale) + np.log(2.) + 0.5 *
        (tf.math.log(df) - np.log(np.pi)) + tf.math.lgamma(0.5 * (df + 1.)) -
        tf.math.lgamma(0.5 * df) - tf.math.log(df - 1))
    mean = tf.math.exp(log_correction) + loc
    if self.allow_nan_stats:
      return tf.where(df > 1., mean,
                      dtype_util.as_numpy_dtype(self.dtype)(np.nan))
    else:
      return distribution_util.with_dependencies([
          assert_util.assert_less(
              tf.ones([], dtype=self.dtype),
              df,
              message='mean not defined for components of df <= 1'),
      ], mean)

  @distribution_util.AppendDocstring("""
      The variance for half-Student's t is

      ```
      defined, when df > 2
      infinity, when 1 < df <= 2
      NaN, when df <= 1
      ```
      """)
  def _variance(self):
    df = tf.convert_to_tensor(self.df)
    scale = tf.convert_to_tensor(self.scale)
    # We need to put the tf.where inside the outer tf.where to ensure we never
    # hit a NaN in the gradient.
    first_denom = tf.where(df > 2., df - 2., 1.)
    second_denom = tf.where(df > 1., df - 1., 1.)
    # Var = E[X^2] - (E[X - loc])^2; the second term reuses the squared mean
    # correction from `_mean`, again assembled in log space.
    var = (
        tf.ones(self._batch_shape_tensor(df=df, scale=scale),
                dtype=self.dtype) * tf.square(scale) * df / first_denom -
        tf.math.exp(2. * tf.math.log(scale) + np.log(4.) + tf.math.log(df) -
                    np.log(np.pi) - 2. * tf.math.log(second_denom) + 2. *
                    (tf.math.lgamma(0.5 *
                                    (df + 1.)) - tf.math.lgamma(0.5 * df))))
    # When 1 < df <= 2, variance is infinite.
    result_where_defined = tf.where(
        df > 2., var,
        dtype_util.as_numpy_dtype(self.dtype)(np.inf))

    if self.allow_nan_stats:
      return tf.where(df > 1., result_where_defined,
                      dtype_util.as_numpy_dtype(self.dtype)(np.nan))
    else:
      return distribution_util.with_dependencies([
          assert_util.assert_less(
              tf.ones([], dtype=self.dtype),
              df,
              message='variance not defined for components of df <= 1'),
      ], result_where_defined)

  def _default_event_space_bijector(self):
    # Maps unconstrained reals onto the support [loc, inf) via
    # x |-> loc + exp(x).
    return chain_bijector.Chain([
        shift_bijector.Shift(shift=self.loc, validate_args=self.validate_args),
        exp_bijector.Exp(validate_args=self.validate_args)
    ],
                                validate_args=self.validate_args)

  def _sample_control_dependencies(self, x):
    """Checks the validity of a sample."""
    assertions = []
    if not self.validate_args:
      return assertions
    loc = tf.convert_to_tensor(self.loc)
    assertions.append(
        assert_util.assert_greater_equal(
            x, loc, message='Sample must be greater than or equal to `loc`.'))
    return assertions

  def _parameter_control_dependencies(self, is_init):
    if not self.validate_args:
      return []
    assertions = []
    # Only re-assert a parameter when its (possibly variable) value may have
    # changed relative to when the check was last built.
    if is_init != tensor_util.is_ref(self.scale):
      assertions.append(
          assert_util.assert_positive(
              self.scale, message='Argument `scale` must be positive.'))
    if is_init != tensor_util.is_ref(self.df):
      assertions.append(
          assert_util.assert_positive(
              self.df, message='Argument `df` must be positive.'))
    return assertions
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
a3e6f84aabc48b8319995511b742d221aa8a1507 | 4b02aa96b41c7852678e7c9b3361830b2d1a1a09 | /LeetCode-solution/problems/palindrome_linked_list/solution.py | 93957b43d3eafe756306421566a762c41d6dcb74 | [] | no_license | arifkhan1990/LeetCode-solution | 4a4124d6b41dc516b673d1b1adc693054a00509f | 85e1a3a285ee059dce091621b79312ba96024eed | refs/heads/master | 2023-01-13T17:26:13.720649 | 2023-01-12T17:35:39 | 2023-01-12T17:35:39 | 243,922,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 699 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
    def isPalindrome(self, head: "ListNode") -> bool:
        """Return True if the singly linked list starting at `head` reads the
        same forwards and backwards.

        The `ListNode` annotation is quoted: the class is only provided by the
        judge environment (see the comment header), so an unquoted annotation
        raised NameError when this module was imported standalone.

        O(n) time, O(n/2) extra space: locate the middle with fast/slow
        pointers, stack the second half's values, then compare the reversed
        second half against nodes walked from the head.
        """
        # Empty and single-node lists are trivially palindromic.
        if not head or not head.next:
            return True
        # `fast` advances two steps per `slow` step, leaving `slow` at the
        # middle node when `fast` runs off the end.
        slow = fast = head
        while fast and fast.next:
            fast, slow = fast.next.next, slow.next
        # Collect the values of the second half (middle..tail) as a stack.
        second_half = [slow.val]
        while slow.next:
            slow = slow.next
            second_half.append(slow.val)
        # Popping yields the second half reversed; it must mirror the nodes
        # walked forward from the head.
        node = head
        while second_half:
            if second_half.pop() != node.val:
                return False
            node = node.next
        return True
"arifkhanshubro@gmail.com"
] | arifkhanshubro@gmail.com |
2363944ebe89bab7ab6f8ff18fdbba8f9a843a45 | 18aee5d93a63eab684fe69e3aa0abd1372dd5d08 | /python/paddle/jit/dy2static/logging_utils.py | b8a6e5f4b63885e689fe1dbba3decac805ce38a7 | [
"Apache-2.0"
] | permissive | Shixiaowei02/Paddle | 8d049f4f29e281de2fb1ffcd143997c88078eadb | 3d4d995f26c48f7792b325806ec3d110fc59f6fc | refs/heads/develop | 2023-06-26T06:25:48.074273 | 2023-06-14T06:40:21 | 2023-06-14T06:40:21 | 174,320,213 | 2 | 1 | Apache-2.0 | 2022-12-28T05:14:30 | 2019-03-07T10:09:34 | C++ | UTF-8 | Python | false | false | 9,074 | py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import threading
from paddle.fluid import log_helper
from .ast_utils import ast_to_source_code
# Module is internal to paddle.jit; nothing is exported publicly.
__all__ = []

# Environment-variable fallbacks for the explicit setters below.
VERBOSITY_ENV_NAME = 'TRANSLATOR_VERBOSITY'
CODE_LEVEL_ENV_NAME = 'TRANSLATOR_CODE_LEVEL'
# -1 means "not configured": no verbosity / no code printing.
DEFAULT_VERBOSITY = -1
DEFAULT_CODE_LEVEL = -1
# Sentinel code level meaning "print code after ALL transformers".
LOG_AllTransformer = 100
def synchronized(func):
    """Decorator serializing concurrent calls to `func` behind one shared lock.

    The lock is created once, at decoration time. The previous version built a
    brand-new ``threading.Lock()`` inside the wrapper on every call, so no two
    callers ever contended on the same lock and the decorator provided no
    mutual exclusion at all.
    """
    lock = threading.Lock()

    def wrapper(*args, **kwargs):
        # Hold the per-function lock for the duration of the call.
        with lock:
            return func(*args, **kwargs)

    return wrapper
class TranslatorLogger:
    """
    class for Logging and debugging during the tranformation from dygraph to static graph.
    The object of this class is a singleton.
    """

    @synchronized
    def __new__(cls, *args, **kwargs):
        # Lazily create the single shared instance.
        # NOTE(review): `synchronized` allocates a fresh threading.Lock per
        # call, so this construction path is not actually mutually
        # exclusive -- verify before relying on thread-safe creation.
        if not hasattr(cls, '_instance'):
            cls._instance = object.__new__(cls, *args, **kwargs)
            cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        # __init__ runs on every TranslatorLogger() call; the guard keeps the
        # singleton's state from being reset after first construction.
        if self._initialized:
            return
        self._initialized = True
        self.logger_name = "Dynamic-to-Static"
        self._logger = log_helper.get_logger(
            self.logger_name,
            1,
            fmt='%(asctime)s %(name)s %(levelname)s: %(message)s',
        )
        # None means "not explicitly set": fall back to the env variables.
        self._verbosity_level = None
        self._transformed_code_level = None
        self._need_to_echo_log_to_stdout = None
        self._need_to_echo_code_to_stdout = None

    @property
    def logger(self):
        return self._logger

    @property
    def verbosity_level(self):
        # Explicit setting wins; otherwise read TRANSLATOR_VERBOSITY (default -1).
        if self._verbosity_level is not None:
            return self._verbosity_level
        else:
            return int(os.getenv(VERBOSITY_ENV_NAME, DEFAULT_VERBOSITY))

    @verbosity_level.setter
    def verbosity_level(self, level):
        self.check_level(level)
        self._verbosity_level = level

    @property
    def transformed_code_level(self):
        # Explicit setting wins; otherwise read TRANSLATOR_CODE_LEVEL (default -1).
        if self._transformed_code_level is not None:
            return self._transformed_code_level
        else:
            return int(os.getenv(CODE_LEVEL_ENV_NAME, DEFAULT_CODE_LEVEL))

    @transformed_code_level.setter
    def transformed_code_level(self, level):
        self.check_level(level)
        self._transformed_code_level = level

    @property
    def need_to_echo_log_to_stdout(self):
        if self._need_to_echo_log_to_stdout is not None:
            return self._need_to_echo_log_to_stdout
        return False

    @need_to_echo_log_to_stdout.setter
    def need_to_echo_log_to_stdout(self, log_to_stdout):
        assert isinstance(log_to_stdout, (bool, type(None)))
        self._need_to_echo_log_to_stdout = log_to_stdout

    @property
    def need_to_echo_code_to_stdout(self):
        if self._need_to_echo_code_to_stdout is not None:
            return self._need_to_echo_code_to_stdout
        return False

    @need_to_echo_code_to_stdout.setter
    def need_to_echo_code_to_stdout(self, code_to_stdout):
        assert isinstance(code_to_stdout, (bool, type(None)))
        self._need_to_echo_code_to_stdout = code_to_stdout

    def check_level(self, level):
        # Accepts ints (and None); anything else raises TypeError.
        if isinstance(level, (int, type(None))):
            rv = level
        else:
            raise TypeError(f"Level is not an integer: {level}")
        return rv

    def has_code_level(self, level):
        # Code is printed only for the exactly matching transformer level.
        level = self.check_level(level)
        return level == self.transformed_code_level

    def has_verbosity(self, level):
        """
        Checks whether the verbosity level set by the user is greater than or equal to the log level.
        Args:
            level(int): The level of log.
        Returns:
            True if the verbosity level set by the user is greater than or equal to the log level, otherwise False.
        """
        level = self.check_level(level)
        return self.verbosity_level >= level

    def error(self, msg, *args, **kwargs):
        self.logger.error(msg, *args, **kwargs)
        if self.need_to_echo_log_to_stdout:
            self._output_to_stdout('ERROR: ' + msg, *args)

    def warn(self, msg, *args, **kwargs):
        # Warnings are suppressed while verbosity is unconfigured (-1).
        if self.verbosity_level != -1:
            self.logger.warning(msg, *args, **kwargs)
            if self.need_to_echo_log_to_stdout:
                self._output_to_stdout('WARNING: ' + msg, *args)

    def log(self, level, msg, *args, **kwargs):
        # Emit only when the configured verbosity covers `level`.
        if self.has_verbosity(level):
            msg_with_level = f'(Level {level}) {msg}'
            self.logger.info(msg_with_level, *args, **kwargs)
            if self.need_to_echo_log_to_stdout:
                self._output_to_stdout('INFO: ' + msg_with_level, *args)

    def log_transformed_code(
        self, level, ast_node, transformer_name, *args, **kwargs
    ):
        # Dump the source produced after the transformer at `level`.
        if self.has_code_level(level):
            source_code = ast_to_source_code(ast_node)
            if level == LOG_AllTransformer:
                header_msg = "After the last level ast transformer: '{}', the transformed code:\n".format(
                    transformer_name
                )
            else:
                header_msg = "After the level {} ast transformer: '{}', the transformed code:\n".format(
                    level, transformer_name
                )

            msg = header_msg + source_code
            self.logger.info(msg, *args, **kwargs)
            if self.need_to_echo_code_to_stdout:
                self._output_to_stdout('INFO: ' + msg, *args)

    def _output_to_stdout(self, msg, *args):
        # Mirror a log record to stdout, prefixed with the logger name.
        msg = self.logger_name + ' ' + msg
        print(msg % args)
_TRANSLATOR_LOGGER = TranslatorLogger()
def set_verbosity(level=0, also_to_stdout=False):
"""
Sets the verbosity level of log for dygraph to static graph. Logs can be output to stdout by setting `also_to_stdout`.
There are two means to set the logging verbosity:
1. Call function `set_verbosity`
2. Set environment variable `TRANSLATOR_VERBOSITY`
**Note**:
`set_verbosity` has a higher priority than the environment variable.
Args:
level(int): The verbosity level. The larger value idicates more verbosity.
The default value is 0, which means no logging.
also_to_stdout(bool): Whether to also output log messages to `sys.stdout`.
Examples:
.. code-block:: python
import os
import paddle
paddle.jit.set_verbosity(1)
# The verbosity level is now 1
os.environ['TRANSLATOR_VERBOSITY'] = '3'
# The verbosity level is now 3, but it has no effect because it has a lower priority than `set_verbosity`
"""
_TRANSLATOR_LOGGER.verbosity_level = level
_TRANSLATOR_LOGGER.need_to_echo_log_to_stdout = also_to_stdout
def get_verbosity():
return _TRANSLATOR_LOGGER.verbosity_level
def set_code_level(level=LOG_AllTransformer, also_to_stdout=False):
"""
Sets the level to print code from specific level Ast Transformer. Code can be output to stdout by setting `also_to_stdout`.
There are two means to set the code level:
1. Call function `set_code_level`
2. Set environment variable `TRANSLATOR_CODE_LEVEL`
**Note**:
`set_code_level` has a higher priority than the environment variable.
Args:
level(int): The level to print code. Default is 100, which means to print the code after all AST Transformers.
also_to_stdout(bool): Whether to also output code to `sys.stdout`.
Examples:
.. code-block:: python
import paddle
paddle.jit.set_code_level(2)
# It will print the transformed code at level 2, which means to print the code after second transformer,
# as the date of August 28, 2020, it is CastTransformer.
os.environ['TRANSLATOR_CODE_LEVEL'] = '3'
# The code level is now 3, but it has no effect because it has a lower priority than `set_code_level`
"""
_TRANSLATOR_LOGGER.transformed_code_level = level
_TRANSLATOR_LOGGER.need_to_echo_code_to_stdout = also_to_stdout
def get_code_level():
return _TRANSLATOR_LOGGER.transformed_code_level
def error(msg, *args, **kwargs):
_TRANSLATOR_LOGGER.error(msg, *args, **kwargs)
def warn(msg, *args, **kwargs):
_TRANSLATOR_LOGGER.warn(msg, *args, **kwargs)
def log(level, msg, *args, **kwargs):
_TRANSLATOR_LOGGER.log(level, msg, *args, **kwargs)
def log_transformed_code(level, ast_node, transformer_name, *args, **kwargs):
_TRANSLATOR_LOGGER.log_transformed_code(
level, ast_node, transformer_name, *args, **kwargs
)
| [
"noreply@github.com"
] | Shixiaowei02.noreply@github.com |
511ca973f7ddab124a8fa1f2c8be06fb5db51303 | 2abd291027ea831fe85ffa8d929e769612f1bc9c | /09 - ginger/app/models/user.py | 7f7a4258475c332502476fd5fd84159c8605f73a | [] | no_license | hippieZhou/Python-Web-Every-Day | 13767ec8fbacfca29e3add0a88976b2afa74d5f5 | 7fc3f4b1378daccdfabc3ca6b66d4f5d4a98e360 | refs/heads/master | 2022-03-10T00:16:25.333925 | 2022-03-05T02:40:26 | 2022-03-05T02:40:26 | 175,198,664 | 5 | 1 | null | 2022-03-05T02:40:51 | 2019-03-12T11:40:10 | HTML | UTF-8 | Python | false | false | 1,508 | py | from sqlalchemy import Column, Integer, String, SmallInteger
from werkzeug.security import generate_password_hash, check_password_hash
from app.models.base import Base, db
from app.libs.error_code import NotFound, AuthFailed
import datetime
class User(Base):
id = Column(Integer, primary_key=True)
email = Column(String(24), unique=True, nullable=False)
nickname = Column(String(24), unique=True)
auth = Column(SmallInteger, default=1)
# time = datetime.date(2018, 5, 20)
_password = Column('password', String(100))
def keys(self):
return ['id', 'email', 'nickname', 'auth']
@property
def password(self):
return self._password
@password.setter
def password(self, raw):
self._password = generate_password_hash(raw)
@staticmethod
def register_by_email(nickname, account, secret):
with db.auto_commit():
user = User()
user.nickname = nickname
user.email = account
user.password = secret
db.session.add(user)
@staticmethod
def verify(email, password):
user = User.query.filter_by(email=email).first_or_404()
if not user.check_password(password):
raise AuthFailed()
scope = 'AdminScope' if user.auth == 2 else 'UserScope'
return {'uid': user.id, 'scope': scope}
def check_password(self, raw):
if not self._password:
return False
return check_password_hash(self._password, raw)
| [
"hippiezhou@outlook.com"
] | hippiezhou@outlook.com |
aeb3f5158fe2c3a4ffb56b2dfcb9d92b091d9a8d | d768f07ed90c0274e2d9d935eaf5ecfe734a1f56 | /lya_statistics/old/compute_power_spectrum_statistics.py | c25a67cceabb721282d7af4e1a083a078453dd85 | [] | no_license | bvillasen/simulation_analysis | cfd0b5de865d2fb5992d828b2824079e6798774b | 645f0c397172ed30a713368942eec9ca68a9761a | refs/heads/master | 2023-06-02T19:06:39.851760 | 2021-06-25T18:40:58 | 2021-06-25T18:40:58 | 298,894,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,280 | py | import os, sys
import numpy as np
import h5py as h5
import pickle
root_dir = os.path.dirname(os.getcwd()) + '/'
subDirectories = [x[0] for x in os.walk(root_dir)]
sys.path.extend(subDirectories)
from tools import *
from stats_functions import compute_distribution, get_highest_probability_interval
use_mpi = False
if use_mpi :
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nprocs = comm.Get_size()
else:
rank = 0
nprocs = 1
print_out = False
if rank == 0: print_out = True
#
# parameters = sys.argv
# if print_out: print( parameters )
# for option in parameters:
# if option.find("uvb") != -1: uvb = option[option.find('=')+1:]
# if print_out: print( f'Snapshot: {n_snap}' )
n_points = 2048
# uvb = 'pchw18'
uvb = 'hm12'
# dataDir = '/home/bruno/Desktop/ssd_0/data/'
dataDir = '/raid/bruno/data/'
# dataDir = '/data/groups/comp-astro/bruno/'
simulation_dir = dataDir + 'cosmo_sims/2048_hydro_50Mpc/'
input_dir = simulation_dir + 'transmited_flux_{0}_review/flux_power_spectrum_new/'.format(uvb)
output_dir = simulation_dir + 'transmited_flux_{0}_review/flux_power_spectrum_new/'.format(uvb)
if rank == 0: create_directory( output_dir )
snaps = [ 83, 90, 96, 102, 119, 124, 130, 136, 143, 151, 159, 169, ]
snaps_boss = [ 96, 102, 106, 110, 114, 119, 124, 130, 136, 143, 151, 159 ]
snapshots = list( set( snaps_boss ).union(set(snaps)))
snapshots.sort()
print(snapshots)
# n_snap = 159
for n_snap in snapshots:
file_name = input_dir + f'flux_ps_{n_snap}.h5'
print( f'Loading File: {file_name}' )
file = h5.File( file_name, 'r')
current_z = file.attrs['current_z']
vel_Hubble = file['vel_Hubble'][...]
k_vals = file['k_vals'][...]
ps_vals = file['flux_power_spectrum'][...]
file.close()
n_skewers, n_bins = ps_vals.shape
vel_max = vel_Hubble.max()
print(f'N Skewers: {n_skewers} n_bins:{n_bins} ' )
n_bins_for_distribution = 100
fill_sum = 0.70
ps_stats = {}
# index = 6
for index in range( 25 ):
k_val = k_vals[index]
vel = 2*np.pi / k_val
stride = n_points * ( vel / vel_max )
n_steps = int( 2048 / stride )
stride = int( stride )
ids_1d = ( np.arange( 0, n_steps, 1 ) * stride ).astype( np.int )
n_1d = len( ids_1d )
n_independent = n_1d**2
print ( f' id: {index}, val: {k_val:.1e} n_independent: {n_independent}' )
delta_vals = ps_vals[:, index] * k_val / np.pi
delta_mean = delta_vals.mean()
delta_sigma = delta_vals.std()
distribution, bin_centers = compute_distribution( delta_vals, n_bins_for_distribution, log=True )
v_l, v_r, v_max, sum = get_highest_probability_interval( bin_centers, distribution, fill_sum, log=True, n_interpolate=1000)
ps_stats[index] = {}
ps_stats[index]['k_val'] = k_val
ps_stats[index]['bin_centers'] = bin_centers
ps_stats[index]['distribution'] = distribution
ps_stats[index]['delta_mean'] = delta_mean
ps_stats[index]['delta_sigma'] = delta_sigma
ps_stats[index]['sigma_l'] = v_l
ps_stats[index]['sigma_r'] = v_r
ps_stats[index]['sigma_max'] = v_max
ps_stats[index]['n_independent'] = n_independent
n_indp_list = []
k_list = []
mean_list, sigma_list = [], []
sigma_asim_l, sigma_asim_r = [], []
for index in range( 25 ):
n_indp_list.append( ps_stats[index]['n_independent'] )
k_list.append( ps_stats[index]['k_val'] )
mean_list.append( ps_stats[index]['delta_mean'] )
sigma_list.append( ps_stats[index]['delta_sigma'] )
sigma_asim_l.append( ps_stats[index]['sigma_l'] )
sigma_asim_r.append( ps_stats[index]['sigma_r'] )
n_independent = np.array( n_indp_list )
k_array = np.array( k_list )
mean_array = np.array( mean_list )
sigma_array = np.array( sigma_list )
sigma_l_array = np.array( sigma_asim_l )
sigma_r_array = np.array( sigma_asim_r )
ps_stats['current_z'] = current_z
ps_stats['k_vals'] = k_array
ps_stats['n_independent'] = n_independent
ps_stats['delta_mean'] = mean_array
ps_stats['delta_sigma'] = sigma_array
ps_stats['delta_sigma_l'] = sigma_l_array
ps_stats['delta_sigma_r'] = sigma_r_array
file_name = output_dir + f'stats_{n_snap}.pkl'
f = open( file_name, 'wb' )
pickle.dump( ps_stats, f)
f.close()
print ( f'Saved File: {file_name }' )
| [
"bvillasen@gmail.com"
] | bvillasen@gmail.com |
d97e7366ebd785ad4958223949829bb8e80a3fd6 | 6764aecdd7941aca93b7320949745348abdb6b8a | /code/fridge/fridge_compressor_durations.py | aa9741609725b85f76bb45a258c1a5481ee89ba0 | [] | no_license | mzhuang1/nilm-actionable | 71f8bcb548c2aad375e67a9e71dfbc76ee640333 | db626a57297ce0ced6b8c601dd18eb408150c6af | refs/heads/master | 2020-04-17T21:09:19.240317 | 2017-01-20T09:49:11 | 2017-01-20T09:49:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,322 | py | import warnings
from nilmtk import DataSet
import nilmtk
import pandas as pd
import numpy as np
warnings.filterwarnings("ignore")
from hmmlearn import hmm
ds = DataSet("/Users/nipunbatra/Downloads/wikienergy-2.h5")
fridges = nilmtk.global_meter_group.select_using_appliances(type='fridge')
Wm_to_kwh = 1.66666667* 1e-5
compressor_powers = {
1: [80, 140],
2: [80, 140],
8: [100, 400],
11: [90, 350],
13: [100, 160],
14: [70, 200],
15: [100, 200],
18: [50, 200],
22: [50, 400],
25: [50, 400],
29: [50, 120],
33: [50, 200],
34: [50, 400],
35: [100, 200],
37: [100, 200],
42: [50, 200],
43: [50, 200],
44: [50, 200],
45: [50, 200],
46: [50, 350],
47: [50, 200],
50: [100, 200],
51: [100, 150],
52: [100, 200],
54: [80, 160],
55: [60, 150],
57: [80, 200],
58: [80, 250],
59: [80, 150],
60: [80, 150],
61: [80, 150],
67: [80, 300],
68: [80, 200],
70: [80, 200],
72: [80, 300],
75: [80, 200],
76: [80, 200],
78: [80, 400],
79: [80, 400],
83: [80, 150],
84: [80, 400],
87: [80, 300],
88: [80, 200],
89: [100, 160],
92: [200, 300],
93: [100, 200],
95: [100, 220],
97: [100, 200],
99: [100, 200],
100: [100, 200],
102: [100, 200],
103: [100, 220],
104: [200, 300],
106: [100, 200],
107: [100, 200],
109: [100, 250],
110: [80, 200],
112: [100, 200],
114: [100, 200],
115: [100, 200],
116: [100, 200],
118: [80, 200],
119: [80, 150],
123: [100, 200],
124: [100, 200],
125: [100, 200],
126: [100, 200],
128: [100, 200],
129: [100, 200],
130: [100, 200],
131: [50, 350],
133: [50, 100],
134: [80, 200],
135: [100, 200],
136: [50, 200],
138: [50, 200],
139: [50, 200],
140: [50, 150],
142: [100, 200],
144: [50, 300],
145: [50, 200],
146: [50, 200],
149: [100, 200],
151: [50, 150],
152: [100, 200],
153: [100, 250],
154: [100, 220],
155: [200, 300],
157: [100, 250],
158: [100, 200],
159: [50, 350],
161: [50, 200],
163: [100, 200],
167: [100, 200],
169: [100, 250],
170: [100, 200]
}
defrost_power = {
1: 350,
2: 400,
8: 550,
11: 400,
13: 600,
14: 400,
15: 500,
18: 350,
22: 500,
25: 410,
29: 150,
33: 250,
34: 500,
35: 300,
37: 600,
42: 200,
43: 400,
44: 400,
45: 400,
46: 1000,
47: 400,
50: 500,
51: 300,
52: 400,
54: 300,
55: 400,
57: 350,
58: 350,
59: 350,
60: 400,
61: 350,
67: 400,
68: 400,
70: 350,
72: 400,
75: 400,
76: 400,
78: 600,
79: 400,
83: 300,
84: 400,
87: 300,
88: 300,
89: 420,
92: 450,
93: 550,
95: 400,
97: 600,
99: 350,
100: 500,
102: 550,
103: 600,
104: 500,
106: 350,
107: 350,
109: 550,
110: 400,
112: 400,
114: 450,
115: 400,
116: 450,
118: 400,
119: 450,
123: 450,
124: 400,
125: 400,
126: 600,
128: 350,
129: 600,
130: 450,
131: 400,
133: 280,
134: 500,
135: 400,
136: 350,
138: 400,
139: 320,
140: 300,
142: 500,
144: 600,
145: 400,
146: 380,
149: 600,
150: 600,
151: 350,
152: 400,
153: 400,
154: 400,
155: 700,
157: 400,
158: 600,
159: 400,
161: 350,
163: 400,
167: 400,
169: 500,
170: 650,
}
def find_on_off(arr):
diff_arr = np.diff(arr)
offs_indices = np.where(diff_arr == -1)[0]
ons_indices = np.where(diff_arr == 1)[0]
if offs_indices[0]<ons_indices[0]:
offs_indices = offs_indices[1:]
l = min(len(ons_indices), len(offs_indices))
offs_indices = offs_indices[:l]
ons_indices = ons_indices[:l]
return ons_indices, offs_indices
"""
def find_on_off_slow(arr):
i=1
while i<len(arr):
if arr[i] - arr[i-1]==1:
start_index.append(i)
i = i+1
#On now, wait till off found
while i<len(arr) and ((arr[i] - arr[i-1])!=-1) :
i = i+1
if i<len(arr):
stop_index.append(i)
else:
i = i+1
l = len(stop_index)
start_index = start_index[:l]
"""
def find_compressor_defrost(n):
df = fridges.meters[n].load().next()[('power', 'active')]
[compressor_min, compressor_max] = compressor_powers[n]
defrost_min = defrost_power[n]
compressor = (df > compressor_min) & (df < compressor_max)
defrost_idx = df > defrost_min
defrost = defrost_idx
compressor[defrost_idx] = False
# return compressor
# Eliminate 1 minute cycles
for i in range(len(df)-2):
if compressor.ix[i] is False and compressor.ix[i+1] is True and compressor.ix[i+2]is False:
compressor.ix[i+1] = False
elif compressor.ix[i] is True and compressor.ix[i+1] is False and compressor.ix[i+2] is True:
compressor.ix[i+1] = True
return compressor, defrost
def compute_fractions_df(df):
a, b, c, tot, mins = fractions_df(n)
return wm_to_kwh_per_month(tot, mins), wm_to_kwh_per_month(a, mins), wm_to_kwh_per_month(c, mins), wm_to_kwh_per_month(b, mins)
def compute_fractions(n):
a, b, c, tot, mins = fractions(n)
return wm_to_kwh_per_month(tot, mins), wm_to_kwh_per_month(a, mins), wm_to_kwh_per_month(c, mins), wm_to_kwh_per_month(b, mins)
def wm_to_kwh_per_month(wm, mins):
return wm*Wm_to_kwh/(mins*1.0/(1440*30))
def return_states_df(n):
df = fridges.meters[n].load().next()[('power','active')]
x = df.head(10000)
x = x[x < 2000]
x = x.reshape((len(x), 1))
# Defrost state? (N=3), else 2
model = hmm.GaussianHMM(n_components=2, covariance_type="full", n_iter=1000)
model.fit([x])
z = model.means_.reshape((2,))
z.sort()
raw_power = df.values
y1 = np.abs(raw_power - z[0])
y2 = np.abs(raw_power - z[1])
y_act = np.zeros(y1.shape)
y_act[np.where((y1-y2>0))[0]]=1
# y_act[np.where((y1-y2>0)&(y2<4*z[1]))[0]]=1
df_states = pd.Series(y_act, index=df.index)
return df_states, z[1]
def fractions(n):
f = fridges.meters[n].load().next()[('power','active')]
c, d = find_compressor_defrost(n)
power_c_sum = f[c].sum()
print power_c_sum
df_cm, df_d = find_on_off_durations(n)
baseline = df_cm.between_time("01:00", "05:00").median()
baseline_duty_percent = baseline['on']/(baseline['on']+baseline['off'])
print baseline_duty_percent
total_mins = len(f)
baseline_energy = total_mins*baseline_duty_percent*f[c].mean()
print total_mins
defrost_energy_self = f[d].sum()
defrost_energy_extra_compressor = 0.0
for i in range(len(df_d.index)):
runtime = df_cm[df_d.index[i]:].head(3)['on'].max()
if runtime > baseline['on']:
extra_run_energy = (runtime-baseline['on'])*f[c].mean()
defrost_energy_extra_compressor = defrost_energy_extra_compressor +extra_run_energy
power_c_sum = power_c_sum - extra_run_energy
defrost_energy = defrost_energy_self + defrost_energy_extra_compressor
usage_energy = power_c_sum - baseline_energy
total_energy = f.sum()
return baseline_energy, usage_energy, defrost_energy, total_energy, total_mins
def return_states_df_defrost(n):
df = fridges.meters[n].load().next()[('power','active')]
X = df.head(10000)
X = X[X<2000]
X = X.reshape((len(X),1))
# Defrost state? (N=3), else 2
model = hmm.GaussianHMM(n_components=3, covariance_type="full", n_iter=1000)
model.fit([X])
z = model.means_.reshape((3,))
z.sort()
raw_power = df.values
p = model.predict(raw_power.reshape((len(raw_power),1)))
y1 = np.abs(raw_power - z[0])
y2 = np.abs(raw_power - z[1])
y_act = np.zeros(y1.shape)
y_act[np.where((y1-y2>0)&(y2<1.2*z[1]))[0]]=1
df_states = pd.Series(y_act, index=df.index)
df_states_hmm = pd.Series(p, index=df.index)
return df_states, df_states_hmm, z
def find_weekend_indices(datetime_array):
indices = []
for i in range(len(datetime_array)):
if datetime_array[i].weekday()>=5:
indices.append(i)
return indices
def highlight_weekend(weekend_indices,ax):
i = 0
while i < len(weekend_indices):
ax.axvspan(weekend_indices[i], weekend_indices[i]+2, facecolor='green', edgecolor='none', alpha=.2)
i += 2
def find_on_off_durations(n):
c, d = find_compressor_defrost(n)
on_c, off_c = find_on_off(c.astype('int').values)
on_d, off_d = find_on_off(d.astype('int').values)
to_ignore = []
# We now need to remove the extra run of compressor due to defrost.
# We look for defrost off and ignore the next compressor cycle
for defrost_off_index in off_d:
next_compressor_index = np.where(on_c>defrost_off_index)[0][0]
to_ignore.append(next_compressor_index)
to_ignore.append(next_compressor_index+1)
to_ignore.append(next_compressor_index+2)
to_ignore.append(next_compressor_index-1)
on_duration_compressor = pd.DataFrame({"on": (off_c - on_c)[:-1],
"off": on_c[1:] - off_c[:-1]},
index=c.index[on_c[:-1]]).sort_index()
to_consider = [x for x in range(len(on_duration_compressor)) if x not in to_ignore]
on_duration_compressor_filtered = on_duration_compressor.ix[to_consider]
on_duration_defrost = pd.DataFrame({"on": (off_d - on_d)[:-1],
"off": on_d[1:] - off_d[:-1]},
index=d.index[on_d[:-1]]).sort_index()
on_duration_defrost = on_duration_defrost[on_duration_defrost.on > 10]
return on_duration_compressor_filtered, on_duration_defrost
def find_on_off_durations_with_without_filter(n):
c, d = find_compressor_defrost(n)
on_c, off_c = find_on_off(c.astype('int').values)
on_d, off_d = find_on_off(d.astype('int').values)
to_ignore =[]
# We now need to remove the extra run of compressor due to defrost.
# We look for defrost off and ignore the next compressor cycle
for defrost_off_index in off_d:
next_compressor_index = np.where(on_c>defrost_off_index)[0][0]
to_ignore.append(next_compressor_index)
to_ignore.append(next_compressor_index+1)
to_ignore.append(next_compressor_index+2)
to_ignore.append(next_compressor_index-1)
on_duration_compressor = pd.DataFrame({"on":(off_c-on_c)[:-1],
"off":on_c[1:] - off_c[:-1]}, index=c.index[on_c[:-1]]).sort_index()
to_consider = [x for x in range(len(on_duration_compressor)) if x not in to_ignore]
on_duration_compressor_filtered = on_duration_compressor.ix[to_consider]
on_duration_defrost = pd.DataFrame({"on":(off_d-on_d)[:-1],
"off":on_d[1:] - off_d[:-1]}, index=d.index[on_d[:-1]]).sort_index()
on_duration_defrost = on_duration_defrost[on_duration_defrost.on>10]
return on_duration_compressor, on_duration_compressor_filtered, on_duration_defrost
def find_baseline(n):
df_c, df_d = find_on_off_durations(n)
times = df_c.index
return df_c.groupby([times.hour]).median().min()
print "he"
o = {}
for n in compressor_powers.keys()[124:126]:
print n
if n not in o.keys():
print n
try:
o[n] = compute_fractions(n)
except:
pass
d = pd.DataFrame(o).T
d.columns = ["total", "baseline", "defrost", "usage"]
dp = d[d.usage > 0]
dp["artifical_sum"] = dp.baseline+dp.defrost+dp.usage
dp["baseline_percentage"] = dp.baseline*100/dp.artifical_sum
dp["defrost_percentage"] = dp.defrost*100/dp.artifical_sum
dp["usage_percentage"] = dp.usage*100/dp.artifical_sum
"""
ds = DataSet("/Users/nipunbatra/Downloads/wikienergy-2.h5")
original_name_dict = {b.metadata['original_name']:b.identifier.instance for b in ds.buildings.values()}
original_name_map = pd.Series(original_name_dict)
reverse_name_map = pd.Series({v:k for k,v in original_name_dict.iteritems() })
fridges = nilmtk.global_meter_group.select_using_appliances(type='fridge')
fridges_dict_original = {i:ds.buildings[fridges.meters[i].building()].metadata['original_name'] for i in range(len(fridges.meters))}
fridges_dict_nilmtk = {i:fridges.meters[i].building() for i in range(len(fridges.meters))}
fridges_map_original = pd.Series(fridges_dict_original)
fridges_map_nilmtk = pd.Series(fridges_dict_nilmtk)
to_ignore = [0, 3, 4, 5, 6, 7, 9, 10, 12, 16, 17, 19, 20, 21, 23, 24, 27, 30, 31,
32, 36, 38, 39, 40, 41, 53, 54, 58, 73, 74, 77, 82, 85, 86, 90, 91 ,
94, 95, 96, 98, 99, 101, 117, 119, 121, 122, 125, 127, 133, 137,
141, 147, 156, 157, 160, 165, 166, 170, 171, 172]
maybe = [60, 80, 81, 105, 113, 120, 126, 159, 162, 14, 46]
anomaly = [6, 48]
out = {}
for n in compressor_powers.keys()[:]:
if n not in out.keys():
print n
try:
t = find_baseline(n)
out[n] = 1.0*t.on/(t.on+t.off)
except:
pass
"""
| [
"nipunb@iiitd.ac.in"
] | nipunb@iiitd.ac.in |
e80e39f7489a25cbe588e5318e01220bb5737102 | e979b765416b947fd089339dd64732d5174e7058 | /FlattenNestedList.py | e403c2d47d188a3400f6cad8396bf089bbb8f891 | [] | no_license | 250mon/CodeWars | d86cdc8ea24bc781c9adf34c24a67195e544a4a1 | 9bea8df60646a052565ae5246144a9d53939b057 | refs/heads/main | 2023-03-15T17:00:34.500844 | 2021-03-25T03:59:49 | 2021-03-25T03:59:49 | 303,961,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | def flatten_list(n_list):
result_list = []
if not n_list:
return result_list
stack = [list(n_list)]
while stack:
c_num = stack.pop()
next = c_num.pop()
if c_num:
stack.append(c_num)
if isinstance(next, list):
if next:
stack.append(list(next))
else:
result_list.append(next)
result_list.reverse()
return result_list
if __name__ == '__main__':
test_list = [0, 10, [20, 30], 40, 50, [60, 70, 80], [90, [100, 110], 120]]
result = flatten_list(test_list)
print(result) | [
"lambki@naver.com"
] | lambki@naver.com |
2ad80a74ff04fdbe4a888ef01bd9c5e25fddc2ce | 5b95b83ba7e18cb40babab37bcb0f5b63bfef3bb | /script8.py | 1ebba089a116b4bec4fb6bc5dc27f3eecb5f4d8f | [] | no_license | Moandh81/w3ressources_python | d9269959cc35c1df4a0ca9d37575c94fb96195f6 | 7a3c65bca50097c2e9b92591443dcb6b03a384a3 | refs/heads/master | 2020-03-30T22:42:23.673212 | 2019-11-11T19:58:16 | 2019-11-11T19:58:16 | 151,675,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | #!/usr/bin/python
# -*- coding: utf-8 -*
#Python Data Type: List - Exercises,
#Write a Python program to print the numbers of a specified list after removing even numbers from it
liste=range(1,11)
i= 0
listepair=[]
while i<len(liste):
if liste[i] % 2 == 0:
listepair.append(liste[i])
i = i + 1
print(listepair) | [
"anis.dhouieb@gmail.com"
] | anis.dhouieb@gmail.com |
9852067a7f48d7c5a1c1a29d4b31449e2151ab87 | 4a0f8c5c0e8324fa614da776f2a704b5c369ccbb | /topologyTest/cp_to_Topology_folder.py | 6e1eac9c65ee2c283c15f3a1a8c2d39bc8d87e15 | [] | no_license | magic2du/contact_matrix | 9f8ae868d71e7e5c8088bf22a9407ea3eb073be6 | 957e2ead76fabc0299e36c1435162edd574f4fd5 | refs/heads/master | 2021-01-18T21:15:07.341341 | 2015-09-16T02:14:53 | 2015-09-16T02:14:53 | 24,237,641 | 0 | 0 | null | 2015-09-10T19:58:24 | 2014-09-19T16:48:37 | null | UTF-8 | Python | false | false | 535 | py | import os, sys
from dealFile import *
ToDoList=sys.argv[1]
listOfAll=readListFile(ToDoList)
listOfSuccess=[]
for folders in listOfAll:
if os.path.exists('/home/du/Protein_Protein_Interaction_Project/3did_15OCT2010/dom_dom_ints/'+folders):
sh='cp -ru /home/du/Protein_Protein_Interaction_Project/3did_15OCT2010/dom_dom_ints/'+folders+' /home/du/Protein_Protein_Interaction_Project/3did_15OCT2010/topologyTest/dom_dom_ints/'
os.system(sh)
listOfSuccess.append(folders)
writeListFile('listOfSuccessCopied_'+ToDoList,listOfSuccess)
| [
"magic2du@gmail.com"
] | magic2du@gmail.com |
90454f44990f308805cb1b8772805fccdc0273e4 | cc6e7f63eaf4b3570771c46fb8b24b88e6e1f59e | /beginner/154/A.py | 09f1a826e3d6bd6b508da9a58d092c35f84c391c | [] | no_license | kamojiro/atcoderall | 82a39e9be083a01c14445417597bf357e6c854a8 | 973af643c06125f52d302a5bc1d65f07a9414419 | refs/heads/master | 2022-07-12T00:14:38.803239 | 2022-06-23T10:24:54 | 2022-06-23T10:24:54 | 161,755,381 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py | #import sys
#input = sys.stdin.readline
def main():
s, t = input().split()
a, b = map( int, input().split())
u = input()
if u == s:
print(a-1, b)
else:
print(a, b-1)
if __name__ == '__main__':
main()
| [
"tamagoma002@yahoo.co.jp"
] | tamagoma002@yahoo.co.jp |
d67b434adfc58def665770ba75217dc4919beb9e | e9c4239c8064d882691314fd5b37208f10447173 | /leetcode/101-200题/177nthHighestSalary.py | 02a4420c9f64fca1244aec82b06600d9aa8dfc5f | [] | no_license | IronE-G-G/algorithm | 6f030dae6865b2f4ff4f6987b9aee06874a386c1 | 6f6d7928207534bc8fb6107fbb0d6866fb3a6e4a | refs/heads/master | 2020-09-21T03:02:20.908940 | 2020-03-22T15:19:41 | 2020-03-22T15:19:41 | 224,658,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,076 | py | """
177 第N高的薪水
编写一个 SQL 查询,获取 Employee 表中第 n 高的薪水(Salary)。
+----+--------+
| Id | Salary |
+----+--------+
| 1 | 100 |
| 2 | 200 |
| 3 | 300 |
+----+--------+
例如上述 Employee 表,n = 2 时,应返回第二高的薪水 200。如果不存在第 n 高的薪水,那么查询应返回 null。
+------------------------+
| getNthHighestSalary(2) |
+------------------------+
| 200 |
+------------------------+
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/nth-highest-salary
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
"""
"""
CREATE FUNCTION getNthHighestSalary(N INT) RETURNS INT
BEGIN
if N<0 then
return (select min(salary) from Employee);
else
set N=N-1;
RETURN (
# Write your MySQL query statement below.
select ifnull((select distinct salary from Employee order by salary desc limit N,1),null) as NthHighestSalary);
end if;
END
"""
| [
"linjh95@163.com"
] | linjh95@163.com |
d73bf4c4e161aa160e6327bec5770961ca88b0d2 | 63b0fed007d152fe5e96640b844081c07ca20a11 | /アルゴ式/グラフアルゴリズム/Q4. ベルマンフォード法 (1).py | 0ef10ba1f767d56b39f85b831aa906cd8a37d79c | [] | no_license | Nikkuniku/AtcoderProgramming | 8ff54541c8e65d0c93ce42f3a98aec061adf2f05 | fbaf7b40084c52e35c803b6b03346f2a06fb5367 | refs/heads/master | 2023-08-21T10:20:43.520468 | 2023-08-12T09:53:07 | 2023-08-12T09:53:07 | 254,373,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | N, M = map(int, input().split())
INF = 10**9
dist = [INF]*N
dist[0] = 0
for _ in range(M):
u, v, w = map(int, input().split())
dist[v] = min(dist[v], dist[u]+w)
print(*dist, sep="\n")
| [
"ymdysk911@gmail.com"
] | ymdysk911@gmail.com |
4441a0a988cb752bf38feafdae50934f047ffde8 | 8fb3931be18a592d230d4cff3c28742a150d13cb | /intrusiondetection_server/funcs_intrusiondetection/intrusiondetectionImpl.py | 35ac3953acea155ad1042c1dc30346b54f2694f2 | [] | no_license | rvilalta/IoT-YANG | cfc12c8a679d51a4455838cc46919a2f9be82e1f | b1317fb306c7c03e098ccb4c675d56464025c173 | refs/heads/master | 2021-01-18T20:17:11.493068 | 2016-08-29T10:41:08 | 2016-08-29T10:41:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,367 | py | import os.path, sys
sys.path.append(os.path.join('/'.join(os.path.dirname(os.path.realpath(__file__)).split('/')[:-1])))
import backend.backend as be
import cv2
import sys
import imutils
import threading
import numpy as np
import datetime
from objects_intrusiondetection.room import Status
def video_name(video_counter):
video_name='test'+str(video_counter)+'.avi'
return video_name
def init_video_recorder(h,w,fps):
fourcc = cv2.cv.FOURCC(*'H264')
zeros = None
print "Starting video recording: " + video_name(video_counter)
writer = cv2.VideoWriter(video_name(video_counter), fourcc, fps, (w, h), True)
zeros = np.zeros((h, w), dtype="uint8")
return writer
def deinit_video_recorder(writer):
print "Stoping video recording"
writer.release()
is_video_init=False
writer = None
def transfer_file(filename):
#request to connect to storage server
print "transfer file " + filename
maxRetries = 20
video_counter=1
is_video_init=False
writer = None
thread1 = None
class MyThread (threading.Thread):
def __init__(self, thread_id, name, video_url, thread_lock):
threading.Thread.__init__(self)
self.thread_id = thread_id
self.name = name
self.video_url = video_url
self.thread_lock = thread_lock
self._stop = threading.Event()
def run(self):
print "Starting " + self.name
window_name = self.name
cv2.namedWindow(window_name)
video = cv2.VideoCapture(self.video_url)
video.set(4,1024)
firstFrame=None
is_video_init=False
writer = None
#GET FPS
fps=video.get(cv2.cv.CV_CAP_PROP_FPS)
MIN_AREA=250
while True:
grabbed,frame= video.read()
text="Unoccupied"
# resize the frame, convert it to grayscale, and blur it
frame_resized = imutils.resize(frame, width=500)
gray = cv2.cvtColor(frame_resized, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (21, 21), 0)
# if the first frame is None, initialize it
if firstFrame is None:
firstFrame = gray
continue
# compute the absolute difference between the current frame and
# first frame
frameDelta = cv2.absdiff(firstFrame, gray)
thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
# dilate the thresholded image to fill in holes, then find contours
# on thresholded image
thresh = cv2.dilate(thresh, None, iterations=2)
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
# loop over the contours
for c in cnts:
# if the contour is too small, ignore it
if cv2.contourArea(c) < MIN_AREA:
continue
# compute the bounding box for the contour, draw it on the frame,
# and update the text
(x, y, w, h) = cv2.boundingRect(c)
cv2.rectangle(frame_resized, (x, y), (x + w, y + h), (0, 255, 0), 2)
text = "Occupied"
global video_counter
#Intrusion detected!
if (text=="Occupied" and is_video_init==False):
(h, w) = frame.shape[:2]
writer=init_video_recorder(h,w,fps)
is_video_init=True
be.intrusiondetection.sensors.status=Status(1)
#During intrusion we record
if text=="Occupied":
writer.write(frame)
#No longer intrusion - We store and transfer
if text=="Unoccupied" and is_video_init==True:
deinit_video_recorder(writer)
transfer_file(video_name(video_counter))
is_video_init=False
video_counter+=1
be.intrusiondetection.sensors.status=Status(2)
cv2.putText(frame, "Room Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
cv2.imshow(window_name, frame)
cv2.imshow("Thresh", thresh)
cv2.imshow("Frame Delta", frameDelta)
cv2.imshow("Security Feed", frame_resized)
cv2.waitKey(1)
#update first frame
del(firstFrame)
firstFrame = gray
del (frame)
key = cv2.waitKey(50)
if self._stop.isSet():
break
print self.name + " Exiting"
cv2.waitKey(1000)
cv2.DestroyAllWindows()
def stop(self):
print self.name + " Stopped"
self._stop.set()
class IntrusiondetectionImpl:
    # Handler class whose classmethods implement put/post/delete/get for the
    # intrusion-detection resource (presumably dispatched by a REST layer --
    # TODO confirm against the framework that calls these).  put() arms or
    # disarms the motion-detection worker thread; the others read/write the
    # backend state `be.intrusiondetection`.
    @classmethod
    def put(cls, intrusiondetectionschema):
        print str(intrusiondetectionschema)
        print 'handling put'
        # NOTE: "disarmed" must be tested before "armed" because the
        # substring "armed" also occurs inside "disarmed".
        if "disarmed" in str(intrusiondetectionschema):
            #sys.exit(0)
            # Stop the thread
            print "Stop thread"
            global thread1
            thread1.stop()
        elif "armed" in str(intrusiondetectionschema):
            #Start the thread
            thread_lock = threading.Lock()
            global thread1
            thread1 = MyThread(1, "Thread 1", 0, thread_lock)
            thread1.start()
    @classmethod
    def post(cls, intrusiondetectionschema):
        print str(intrusiondetectionschema)
        print 'handling post'
        # Store the posted schema as the current backend state.
        be.intrusiondetection = intrusiondetectionschema
    @classmethod
    def delete(cls, ):
        print 'handling delete'
        # KeyError with an empty message signals "no resource" to the caller.
        if be.intrusiondetection:
            del be.intrusiondetection
        else:
            raise KeyError('')
    @classmethod
    def get(cls, ):
        print 'handling get'
        if be.intrusiondetection:
            return be.intrusiondetection
        else:
            raise KeyError('')
| [
"a@a.com"
] | a@a.com |
44da64af3f47165d63c8570ec96bdb194d74670e | 2245f0acc3f5682129491b245acd3fd8ab2e4128 | /Desafio111/utilidadesCeV/ex_111.py | bd920ee20cdf37563ff655b912dc745b5276f3b7 | [] | no_license | wherculano/Curso-em-Video-Python | 89bed7b7e01f25ba47efa511304d18448a47a4bb | 5506645a46973a5ccd2c3d5c1ff782c51181b4bf | refs/heads/master | 2022-04-12T08:26:26.616135 | 2020-03-26T17:53:21 | 2020-03-26T17:53:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | """
Crie um pacote chamado utilidadesCeV que tenha dois módulos internos chamados moeda e dado.
Transfira todas as funções utilizadas nos desafios 107, 108, 109 e 110
para o primeiro pacote e mantenha tudo funcionando.
Ex:
>>> moeda.resumo(850, 35, 22)
----------------------------------
RESUMO DO VALOR
----------------------------------
Preço analisado: R$850,00
Dobro do preço: R$1700,00
Metade do preço: R$425,00
35% de aumento: R$1147,50
22% de redução: R$663,00
----------------------------------
"""
from Desafio111.utilidadesCeV import moeda
# preco = float(input('Digite o preço: R$'))
# moeda.resumo(preco, 80, 35)
| [
"wagherculano@hotmail.com"
] | wagherculano@hotmail.com |
84a35beba16bd47d8a2654e62502bffbe5037477 | 3c114c083af073421fc0becfa4b4471ba1d77de5 | /google/two_sum_bst.py | 2b3e3f1c2703a1d548d36af103cb120dc7ea5901 | [] | no_license | alonsovidales/interview_questions | 99f757c7e35c5ede450be25d3bebd54a18b1312b | 5e63e238950c2f6bdfd3ff48311d6c69a676d382 | refs/heads/master | 2021-01-17T12:06:48.419891 | 2018-03-25T08:44:14 | 2018-03-25T08:44:14 | 30,909,319 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,019 | py | """
Given a BST and a number x, check whether exists two nodes in the BST whose sum
equals to x. You can not use one extra array to serialize the BST and do a 2sum
solver on it.
"""
class Bst(object):
    """Binary search tree supporting a two-sum query without an extra array.

    Values equal to a node are inserted into its *right* subtree, so any
    second occurrence of a value is reachable by walking right from the
    first occurrence found on the search path.
    """

    class BstNode(object):
        """A single tree node: value plus left/right children."""
        def __init__(self, v, l=None, r=None):
            self.v = v
            self.l = l
            self.r = r

    def __init__(self):
        self._tree = None

    def add(self, v):
        """Insert value v (duplicates allowed; equal values go right)."""
        if self._tree is None:
            self._tree = self.BstNode(v)
            return
        tree = self._tree
        while True:
            if tree.v > v:
                if tree.l is not None:
                    tree = tree.l
                else:
                    tree.l = self.BstNode(v)
                    return
            else:
                if tree.r is not None:
                    tree = tree.r
                else:
                    tree.r = self.BstNode(v)
                    return

    def exists(self, v, x=None, node=None):
        """Return True if a node with value v exists.

        When v == x, the node that produced the complement must not be
        reused, so a *second* node with that value is required.  Bug fix:
        the original recursed LEFT on an equal match, but duplicates are
        stored in the RIGHT subtree, so pairs such as 4 + 4 were missed.
        """
        if node is None:
            node = self._tree
        if node is None:  # empty tree (the original crashed here)
            return False
        if node.v == v:
            if v != x:
                return True
            # v equals the excluded value: a second occurrence, if any,
            # lives in this node's right subtree.
            return node.r is not None and self.exists(v, None, node.r)
        if v > node.v:
            return node.r is not None and self.exists(v, x, node.r)
        return node.l is not None and self.exists(v, x, node.l)

    def two_sum(self, x, node=None):
        """Return True if two distinct nodes sum to x."""
        if node is None:
            node = self._tree
        if node is None:
            return False
        if self.exists(x - node.v, node.v):
            return True
        return ((node.r is not None and self.two_sum(x, node.r)) or
                (node.l is not None and self.two_sum(x, node.l)))
import unittest
class TestBst(unittest.TestCase):
    """Unit tests for Bst.two_sum."""

    def test_two_sum(self):
        tree = Bst()
        for value in (4, 3, 5, 8, 1):
            tree.add(value)
        # Targets reachable as the sum of two distinct nodes.
        for target in (6, 8):
            self.assertTrue(tree.two_sum(target))
        # Targets that cannot be formed from any pair.
        for target in (2, 1):
            self.assertFalse(tree.two_sum(target))
# Run the unit tests when executed as a script.
if __name__ == '__main__':
    unittest.main()
| [
"alonso.vidales@tras2.es"
] | alonso.vidales@tras2.es |
ad11a8c211c0d94f0c80515ff0321a91d0538ace | 9f0532cd700a9cdaefeb6274608aa971c23a3be8 | /raspi_io/graph.py | 28c8fc0622e05512c33b588db3817e9af5d232a6 | [
"MIT"
] | permissive | Kassadinsw/raspi-io | ac494ede3a6404228eac19261d5b9b2eaba69f8f | 159e45120ca0ffc86549ad83ef31c140a8dd6e21 | refs/heads/master | 2020-05-20T11:23:19.134443 | 2018-07-20T08:41:39 | 2018-07-20T08:41:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,620 | py | # -*- coding: utf-8 -*-
import os
from PIL import Image
from .client import RaspiWsClient
from .core import RaspiBaseMsg, RaspiAckMsg, get_binary_data_header
__all__ = ['MmalGraph', 'GraphInit', 'GraphClose', 'GraphProperty']
class GraphInit(RaspiBaseMsg):
    # One-shot message sent at client construction time.
    # `_handle` presumably names the server-side handler routine and
    # `_properties` the payload fields -- TODO confirm against RaspiBaseMsg.
    _handle = 'init'
    _properties = {'display_num'}
    def __init__(self, **kwargs):
        super(GraphInit, self).__init__(**kwargs)
class GraphClose(RaspiBaseMsg):
    # Message asking the server to close the currently displayed graph.
    _handle = 'close'
    def __init__(self, **kwargs):
        super(GraphClose, self).__init__(**kwargs)
class GraphProperty(RaspiBaseMsg):
    # Message querying a single remote property selected by `property`.
    _handle = 'get_property'
    _properties = {'property'}
    # Property selectors understood by the server side.
    URI, IS_OPEN, DISPLAY_NUM = 1, 2, 3
    def __init__(self, **kwargs):
        super(GraphProperty, self).__init__(**kwargs)
class MmalGraph(RaspiWsClient):
    # Display numbers understood by the raspberry pi video core.
    LCD = 4
    HDMI = 5
    # Formats that benefit from being re-encoded as PNG before transfer.
    REDUCE_SIZE_FORMAT = ("BMP",)
    PATH = __name__.split(".")[-1]

    def __init__(self, host, display_num=HDMI, reduce_size=True, timeout=3, verbose=1):
        """Display a graph on raspberry pi specified monitor

        :param host: raspberry pi address
        :param display_num: display monitor number (HDMI or LCD)
        :param reduce_size: reduce bmp graph size then transfer
        :param timeout: raspi-io timeout unit second
        :param verbose: verbose message output
        """
        super(MmalGraph, self).__init__(host, str(display_num), timeout, verbose)
        ret = self._transfer(GraphInit(display_num=display_num))
        if not isinstance(ret, RaspiAckMsg) or not ret.ack:
            raise RuntimeError(ret.data)
        self.__uri = ""
        self.__reduce_size = reduce_size

    def __del__(self):
        # Best-effort cleanup; __init__ may have raised before the client
        # attributes existed, hence the AttributeError guard.
        try:
            self.close()
        except AttributeError:
            pass

    @property
    def uri(self):
        # Path of the last successfully opened image ("" if none).
        return self.__uri

    @property
    def is_open(self):
        # Ask the server whether a graph is currently displayed.
        ret = self._transfer(GraphProperty(property=GraphProperty.IS_OPEN))
        return ret.data if isinstance(ret, RaspiAckMsg) and ret.ack else False

    @property
    def display_num(self):
        # Ask the server which monitor it is bound to (None on failure).
        ret = self._transfer(GraphProperty(property=GraphProperty.DISPLAY_NUM))
        return ret.data if isinstance(ret, RaspiAckMsg) and ret.ack else None

    def open(self, path, reduce_size=None):
        """Open an image display on raspberry pi via mmal video core

        :param path: image file path
        :param reduce_size: reduce bmp graph size then transfer
            (overrides the instance default when not None)
        :return: True on success
        """
        self.__uri = ""
        png_path = "{}.png".format(os.path.basename(path))
        png_created = False  # only delete png_path if *we* wrote it
        reduce_size = reduce_size if reduce_size is not None else self.__reduce_size
        try:
            # Open original file; close the handle below to avoid a leak
            # (the original never closed the PIL image).
            image = Image.open(path)
            try:
                fmt = image.format
                # Re-encode bulky formats (e.g. BMP) as PNG to shrink transfer
                if reduce_size and fmt in self.REDUCE_SIZE_FORMAT:
                    image.save(png_path)
                    png_created = True
                    path = png_path
                    fmt = "PNG"
            finally:
                image.close()
            # Read data to memory
            with open(path, "rb") as fp:
                data = fp.read()
            # First transfer header info, then the raw image bytes
            if self._send_binary_data(get_binary_data_header(data, fmt, "open"), data):
                self.__uri = path
                return True
            else:
                return False
        except IOError as err:
            self._error("Open error:{}".format(err))
            return False
        finally:
            # Bug fix: the original removed png_path unconditionally, which
            # could delete a pre-existing, unrelated "<name>.png" in the cwd
            # even when this call never created it.
            if png_created and os.path.isfile(png_path):
                os.remove(png_path)

    def close(self):
        # Ask the server to close the current graph.
        ret = self._transfer(GraphClose())
        return ret.data if isinstance(ret, RaspiAckMsg) and ret.ack else False
| [
"amaork@gmail.com"
] | amaork@gmail.com |
22cce2269ea2ed4befefe7ca4abc2ac9e571ba4b | de861acdf4d51a766512be0834055ad403916677 | /xii/meshing/tikz.py | 1bb1f2099ba7f87d45925685e176cfef56a97c9b | [
"MIT"
] | permissive | ChaogeCanFly/fenics_ii | 847c3faf4e1bf591addbe5a279980497f87d9c90 | 49a18855d077ab6e63e4f0b4d6a2f061de7f36ba | refs/heads/master | 2022-12-05T04:25:53.241712 | 2020-04-17T07:40:32 | 2020-04-17T07:40:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,979 | py | from dolfin import *
# Standalone LaTeX document skeleton; %(body)s is filled with the generated
# TikZ drawing commands by tikzify_2d_mesh below.
template=r'''
\documentclass{standalone}
\usepackage{tikz}
\usetikzlibrary{calc}
\usetikzlibrary{shapes, snakes, patterns, arrows}
\usepackage{pgfplots}
\usepackage{pgfplotstable}
\usepackage{amsmath, amssymb}
\begin{document}
\begin{tikzpicture}
%(body)s
\end{tikzpicture}
\end{document}
'''
def tikzify_2d_mesh(facet_info, cell_info=None, vertex_info=None):
    '''Standalone Tikz representation of the mesh.

    Each *_info argument is a (markers, style_map) pair -- facet_info and
    vertex_info may also be lists of such pairs.  A style_map entry of
    None means "do not draw" (facets/vertices only).
    '''
    commands = []
    # Filled cells first so edges and labels are drawn on top of them.
    if cell_info is not None:
        cell_markers, cell_style_map = cell_info
        assert cell_style_map is not None
        mesh = cell_markers.mesh()
        coords = mesh.coordinates()
        cell_tpl = r'\fill[%s] (%g, %g) -- (%g, %g) -- (%g, %g) -- cycle;'
        for idx, cell in enumerate(mesh.cells()):
            style = cell_style_map[cell_markers[idx]]
            commands.append(cell_tpl % ((style, ) + tuple(coords[cell].flatten())))
    # Facet edges; promote a single pair to a one-element list.
    facet_infos = [facet_info] if isinstance(facet_info, tuple) else facet_info
    for facet_markers, facet_style_map in facet_infos:
        mesh = facet_markers.mesh()
        assert mesh.geometry().dim() == 2
        coords = mesh.coordinates()
        dim = facet_markers.dim()
        assert dim == 1
        mesh.init(dim)
        mesh.init(dim, 0)
        edge_tpl = r'\draw[%(style)s] (%(x00)g, %(x01)g) -- (%(x10)g, %(x11)g);'
        for facet in facets(mesh):
            style = facet_style_map[facet_markers[facet]]
            if style is None:
                continue
            p0, p1 = coords[facet.entities(0)]
            commands.append(edge_tpl % {'style': style,
                                        'x00': p0[0], 'x01': p0[1],
                                        'x10': p1[0], 'x11': p1[1]})
    # Optional vertex labels.
    if vertex_info is not None:
        vertex_infos = [vertex_info] if isinstance(vertex_info, tuple) else vertex_info
        for vertex_markers, vertex_style_map in vertex_infos:
            assert vertex_style_map is not None
            mesh = vertex_markers.mesh()
            node_tpl = r'\node[%s] at (%g, %g) {%s};'
            for idx, vtx in enumerate(mesh.coordinates()):
                style, marker = vertex_style_map[vertex_markers[idx]]
                if style is not None:
                    commands.append(node_tpl % (style, vtx[0], vtx[1], marker))
    return template % {'body': '\n'.join(commands)}
def load_mesh(h5_file, data_sets):
    '''
    Read in mesh and mesh functions from the data set in HDF5File.
    Each entry of data_sets is a (topological dim of entities, data-set-name)
    pair; a data set missing from the file yields None in the result list.
    '''
    h5 = HDF5File(mpi_comm_world(), h5_file, 'r')
    mesh = Mesh()
    h5.read(mesh, 'mesh', False)

    def read_function(dim, name):
        # Missing data sets are tolerated and reported as None.
        if not h5.has_dataset(name):
            return None
        mesh_f = MeshFunction('size_t', mesh, dim, 0)
        h5.read(mesh_f, name)
        return mesh_f

    mesh_functions = [read_function(dim, name) for dim, name in data_sets]
    return mesh, mesh_functions
# -------------------------------------------------------------------

if __name__ == '__main__':
    from itertools import repeat

    path = './round_bdry.geo_d0.03125_0.5.h5'
    dim = 2
    mesh, [subdomains, bdries] = load_mesh(path, data_sets=((dim, 'volumes'), (dim-1, 'surfaces'), ))

    # Draw only facets marked 1 (in red); every other marker maps to None
    # and is therefore skipped.
    # style_map = dict(zip(set(bdries.array()), repeat('black!50!white, very thin')))
    style_map = dict(zip(set(bdries.array()), repeat(None)))
    style_map[1] = 'red, very thin'

    # Bug fix: facet_info must be one (markers, style_map) tuple; passing the
    # style map as a second positional argument filled the cell_info slot.
    code = tikzify_2d_mesh((bdries, style_map))

    with open('mesh_2d.tex', 'w') as f: f.write(code)

    mesh = UnitSquareMesh(2, 2)
    facet_f = MeshFunction('size_t', mesh, 1, 0)
    DomainBoundary().mark(facet_f, 1)
    facet_style_map = {0: 'black', 1: 'black'}

    cell_f = MeshFunction('size_t', mesh, 2, 0)
    cell_f[0] = 1
    cell_style_map = {0: 'red', 1: 'blue'}

    # Bug fix: a comma was missing between the two arguments, which made
    # this expression *call* the first tuple -> TypeError at runtime.
    code = tikzify_2d_mesh((facet_f, facet_style_map),
                           (cell_f, cell_style_map))

    with open('mesh_2d.tex', 'w') as f: f.write(code)
| [
"miroslav.kuchta@gmail.com"
] | miroslav.kuchta@gmail.com |
0d30bcc4d29dbf9b8f231055058bc5135d84c218 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/420/usersdata/329/88117/submittedfiles/exe11.py | 36546467df9037e9e9fe2183694c2c0da1de4c3f | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | # -*- coding: utf-8 -*-
# NOTE(review): `soma` is read below before it is ever assigned, so this
# script raises NameError on the fourth statement -- it likely needs
# `soma = 0` before the first accumulation. TODO confirm intended behaviour.
n = int(input("digite um numero com 8 algarismos: "))
resto = n % 10
n = (n - resto)/10
soma = soma + resto
# NOTE(review): the loop conditions below are not updated in a way that
# guarantees termination (`while soma > 72` / `while n < 1` either never
# run or never end). The control flow looks unfinished student code.
while soma < 72 :
    print ('%d' % soma)
    while soma > 1:
        resto = n % 10
        n = (n - resto)/10
        soma = soma + resto
        print ('%d' % soma)
while soma > 72:
    print('NAO SEI')
while n < 1:
    print('NAO SEI')
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
b9ae865af4885206e8591c08daf99dfb5e4e0c87 | 048eaa32bcd05ee278e6f391f9091f1848022987 | /dbdk/ai_based_adaptive_security_system/test_model_ig_init_train.py | b0db71af2f9704dba04a033ca5d993f8cdd48e5d | [] | no_license | kendricklee91/portfolio | 613e14fd6f13027c6d68d56b1b3d96b186de56b1 | ef92604a1e14a3b81ae5e91883c07501def6b3da | refs/heads/master | 2022-11-15T01:00:55.724686 | 2022-10-27T06:47:44 | 2022-10-27T06:47:44 | 170,043,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,557 | py | from ksv_model.model_payload import known_model, known_model_retrain, known_model_hopt, known_model_inference, data_load_save
from ksv_model.model_ig import ModelIG, ig_data_load_save
import ksv_model.config.const as cst
from sklearn.model_selection import train_test_split, GridSearchCV, RandomizedSearchCV
from sklearn.metrics import roc_auc_score
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials, space_eval
from scipy import stats
import pandas as pd
import unittest
import json
import os
# import pprint # for test
class TestModel1st(unittest.TestCase):
    """End-to-end training smoke test for the IG model."""

    def setUp(self):
        # No shared fixture state is required.
        pass

    def tearDown(self):
        pass

    def test_ig_train(self):
        # Train once on the canned CSV sample and persist the result.
        train_csv = os.path.join(cst.PATH_DATA, 'ig_ps_20190830_01.csv')
        model_json = os.path.join(cst.PATH_MODEL, 'ig_train_01.json')
        model_weights = os.path.join(cst.PATH_MODEL, 'ig_train_01.h5')
        # Hyper-parameters live in a JSON config file.
        with open(os.path.join(cst.PATH_CONFIG, 'model_ig_param.json'),
                  encoding='utf-8') as json_file:
            params = json.load(json_file)
        trainer = ModelIG()
        model, hist, loss, acc = trainer.create_model(train_csv, params)
        # Persist both the architecture (json) and the weights (h5).
        saver = ig_data_load_save()
        saver.save_model_and_weight(model, model_json, model_weights)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
"noreply@github.com"
] | kendricklee91.noreply@github.com |
2cba5f29bc01c1976a25da33aa9cd8b4d8ef6a2c | 49bf36ba213b28d4aaeb63feba632fb05710d565 | /Python/BOJ/2941.py | 24dae225acced1392db2bc7a11b07906bb71c616 | [] | no_license | ohmozi/Algorithm | fc3fc861d4125b642d64b6e344eca806d137d0f2 | 436a376b009e8c073ceebc6b1e29b32b63c15a07 | refs/heads/master | 2023-07-23T16:01:49.774331 | 2021-08-16T02:08:56 | 2021-08-16T02:08:56 | 284,995,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | # 크로아티아 알파벳
# Croatian alphabet (BOJ 2941): count how many alphabet units a word contains.
# "dz=" is the only three-character unit and must be matched before "z=" so
# it is not double counted.
CROATIAN = ['c=', 'c-', 'd-', 'lj', 'nj', 's=', 'z=']


def count_croatian(text):
    """Return the number of Croatian alphabet units in *text*.

    Greedily matches "dz=" (3 chars), then any two-character unit from
    CROATIAN, then a single ordinary letter; each match counts as one unit.
    """
    i = 0
    count = 0
    while i < len(text):
        if text[i:i + 3] == "dz=":
            i += 3
        elif text[i:i + 2] in CROATIAN:
            i += 2
        else:
            i += 1
        count += 1
    return count


if __name__ == "__main__":
    # Read one word from stdin and print its unit count (same observable
    # behaviour as the original flat script when run directly).
    print(count_croatian(input()))
"gown10195@gmail.com"
] | gown10195@gmail.com |
75c467a86726f93b6c2fe1be168a9a16d4ee2d79 | 6a3af6fe669b2e17db1fa7d0751cbc4e04948079 | /fn_sdk_test/fn_sdk_test/components/funct_utilities_pdfid.py | a99c6c6446ee1d02426ea7c1c986e1446f201611 | [
"MIT"
] | permissive | jjfallete/resilient-community-apps | 5f0a728fe0be958acc44d982bf0289959f84aa20 | 2e3c4b6102555517bad22bf87fa4a06341714166 | refs/heads/master | 2022-04-17T13:20:36.961976 | 2020-04-13T07:03:54 | 2020-04-13T07:03:54 | 169,295,943 | 1 | 0 | MIT | 2020-04-13T07:03:56 | 2019-02-05T19:06:57 | Python | UTF-8 | Python | false | false | 2,539 | py | # -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
PACKAGE_NAME = "fn_sdk_test"
class FunctionComponent(ResilientComponent):
    """Resilient circuits component backing the 'utilities_pdfid' function."""

    def __init__(self, opts):
        """Cache this package's section of the app configuration."""
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get(PACKAGE_NAME, {})

    @handler("reload")
    def _reload(self, event, opts):
        """Re-read the package options after a configuration reload."""
        self.options = opts.get(PACKAGE_NAME, {})

    @function("utilities_pdfid")
    def _utilities_pdfid_function(self, event, *args, **kwargs):
        """Function: Produces summary information about the structure of a PDF file, using Didier Stevens' pdfid (https://blog.didierstevens.com/programs/pdf-tools/). Provide the PDF file content as a base64-encoded string, for example the output from the “Attachment to Base64” function.
        This function is useful in initial triage of suspicious email attachments and other files. It allows you to identify PDF documents that contain (for example) JavaScript or that execute an action when opened. PDFiD also handles name obfuscation. The combination of PDF automatic action and JavaScript makes a document very suspicious."""
        log = logging.getLogger(__name__)
        try:
            # Identify the workflow instance that invoked this function.
            workflow_id = event.message["workflow_instance"]["workflow_instance_id"]
            yield StatusMessage("Starting 'utilities_pdfid' running in workflow '{0}'".format(workflow_id))

            # Function input: base64-encoded PDF content.
            base64content = kwargs.get("base64content")  # text
            log.info("base64content: %s", base64content)

            ##############################################
            # PUT YOUR FUNCTION IMPLEMENTATION CODE HERE #
            ##############################################

            yield StatusMessage("Finished 'utilities_pdfid' that was running in workflow '{0}'".format(workflow_id))

            # Produce a FunctionResult with the results
            yield FunctionResult({"content": "xyz"})
        except Exception:
            yield FunctionError()
| [
"ihor.husar@ibm.com"
] | ihor.husar@ibm.com |
4fd4c2d22d7b4d50e4eb887b4ecc430a0c2dace9 | f340b9f47aaf11d95911074efd16e2878b4608c5 | /200111/Find_Leaves_of_Binary_Tree.py | 18926424e163a47aee3fc9c85898b4f174fd9609 | [] | no_license | Jsonghh/leetcode | 150020d1250a7e13e7387a545b4eb7df0de8f90b | 3a83c0b0bcc43f458f7fc54764f60e1104fcc12e | refs/heads/master | 2020-11-25T03:12:48.842151 | 2020-02-11T02:56:58 | 2020-02-11T02:56:58 | 228,475,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    # LeetCode 366: repeatedly collect and remove the leaves of a binary
    # tree until it is empty; each removal round becomes one inner list.
    def findLeaves(self, root: "TreeNode") -> "List[List[int]]":
        """Return node values grouped by leaf-removal round.

        Destructive: child links are set to None as leaves are stripped.
        """
        ans = []
        if not root:
            return ans
        while root:
            leaves = []
            # is_leaf prunes this round's leaves into `leaves` as a side
            # effect and reports whether the root itself became a leaf.
            if self.is_leaf(root, leaves):
                leaves.append(root.val)
                root = None  # the whole tree has been consumed
            ans.append(leaves[:])
        return ans
    def is_leaf(self, node, leaves):
        """Prune the current leaves below *node*, collecting their values.

        Returns True when *node* itself is a leaf (so the caller removes
        it); otherwise detaches every child that is a leaf this round and
        returns False.  Post-order: children are tested before detaching.
        """
        if not node.left and not node.right:
            return True
        if node.left and self.is_leaf(node.left, leaves):
            leaves.append(node.left.val)
            node.left = None
        if node.right and self.is_leaf(node.right, leaves):
            leaves.append(node.right.val)
            node.right = None
        return False
| [
"jiesonghe@outlook.com"
] | jiesonghe@outlook.com |
793b94513930d4a6f4168891222bebd9d24bc2cf | 75e641d2b33d0865e19193877e48748b3de5007c | /Parameter-Config/parameters_examples.py | 1b708cdceb37bafbe36ad059ac0e24e380d2c198 | [
"MIT"
] | permissive | bergolho/fractal-tree | dc3f7918ab6f1a6f714aaec56ee15e092e180733 | c55a375585aac5168063fe34d078b54d3f43364c | refs/heads/master | 2020-06-14T02:42:36.908723 | 2020-02-06T17:57:04 | 2020-02-06T17:57:04 | 194,871,693 | 0 | 0 | null | 2019-07-02T13:45:39 | 2019-07-02T13:45:39 | null | UTF-8 | Python | false | false | 5,275 | py | # -*- coding: utf-8 -*-
"""
This module contains the Parameters class that is used to specify the input parameters of the tree.
"""
import numpy as np
class Parameters():
    """Class to specify the parameters of the fractal tree.
    Attributes:
        meshfile (str): path and filename to obj file name.
        filename (str): name of the output files.
        init_node (numpy array): the first node of the tree.
        second_node (numpy array): this point is only used to calculate the initial direction of the tree and is not included in the tree. Please avoid selecting nodes that are connected to the init_node by a single edge in the mesh, because it causes numerical issues.
        init_length (float): length of the first branch.
        N_it (int): number of generations of branches.
        length (float): average lenght of the branches in the tree.
        std_length (float): standard deviation of the length. Set to zero to avoid random lengths.
        min_length (float): minimum length of the branches. To avoid randomly generated negative lengths.
        branch_angle (float): angle with respect to the direction of the previous branch and the new branch.
        w (float): repulsivity parameter.
        l_segment (float): length of the segments that compose one branch (approximately, because the lenght of the branch is random). It can be interpreted as the element length in a finite element mesh.
        Fascicles (bool): include one or more straigth branches with different lengths and angles from the initial branch. It is motivated by the fascicles of the left ventricle.
        fascicles_angles (list): angles with respect to the initial branches of the fascicles. Include one per fascicle to include.
        fascicles_length (list): length of the fascicles. Include one per fascicle to include. The size must match the size of fascicles_angles.
        save (bool): save text files containing the nodes, the connectivity and end nodes of the tree.
        save_paraview (bool): save a .vtu paraview file. The tvtk module must be installed.
    """
    def __init__(self):
        # Rabbit heart example
        self.meshfile='Mesh/WSA_i.obj'
        # LV
        # Left-ventricle variant kept for reference (disabled).
        #self.filename='Purkinje-Networks/LV-rabbit'
        #self.init_node=np.array([-7.35027,4.06893,0.244092])
        #self.second_node=np.array([-6.97912,3.95322,0.334369])
        #self.init_length=8.0
        #Number of iterations (generations of branches)
        #self.N_it=7
        #Median length of the branches
        #self.length=2.0
        #Standard deviation of the length
        #self.std_length = np.sqrt(0.2)*self.length
        #Min length to avoid negative length
        #self.min_length = self.length/2.0
        #self.branch_angle=0.1
        #self.w=0.02
        #Length of the segments (approximately, because the lenght of the branch is random)
        #self.l_segment=0.1
        #self.Fascicles=True
        # RV
        # Active configuration: right-ventricle rabbit Purkinje network.
        self.filename='Purkinje-Networks/RV-rabbit'
        self.init_node=np.array([-7.559,7.542,0.111])
        self.second_node=np.array([-7.18805,7.47768,0.237085])
        self.init_length=9.0
        #Number of iterations (generations of branches)
        self.N_it=7
        #Median length of the branches
        self.length=2.0
        #Standard deviation of the length
        self.std_length = np.sqrt(0.2)*self.length
        #Min length to avoid negative length
        self.min_length = self.length/2.0
        self.branch_angle=0.07
        self.w=0.03
        #Length of the segments (approximately, because the lenght of the branch is random)
        self.l_segment=0.1
        self.Fascicles=True
        ###########################################
        # Fascicles data
        ###########################################
        self.fascicles_angles=[-1.5,.2] #rad
        self.fascicles_length=[.5,.5]
        # Save data?
        self.save=True
        self.save_paraview=True
        # NOTE: the two triple-quoted blocks below are inert string-literal
        # statements used to stash alternative example configurations
        # (sphere and block meshes); they are never executed.
        '''
        # Sphere example
        self.meshfile='Mesh/sphere.obj'
        self.filename='sphere-line'
        self.init_node=np.array([-1.0 ,0., 0.])
        self.second_node=np.array([-0.964, 0.00, 0.266 ])
        self.init_length=0.5
        #Number of iterations (generations of branches)
        self.N_it=10
        #Median length of the branches
        #self.length=.3
        self.length=2.0
        #Standard deviation of the length
        self.std_length = np.sqrt(0.2)*self.length
        #Min length to avoid negative length
        self.min_length = self.length/10.
        self.branch_angle=0.15
        self.w=0.1
        #Length of the segments (approximately, because the lenght of the branch is random)
        self.l_segment=.01
        self.Fascicles=True
        '''
        '''
        # Block example
        #self.meshfile='Mesh/block_i.obj'
        #self.filename='block-test'
        #self.init_node=np.array([0.14,0.24,0.04])
        #self.second_node=np.array([-0.04,0.06,-0.06])
        #self.init_length=0.5
        #Number of iterations (generations of branches)
        self.N_it=10
        #Median length of the branches
        #self.length=.3
        self.length=2.0
        #Standard deviation of the length
        self.std_length = np.sqrt(0.2)*self.length
        #Min length to avoid negative length
        self.min_length = self.length/10.
        self.branch_angle=0.15
        self.w=0.1
        #Length of the segments (approximately, because the lenght of the branch is random)
        self.l_segment=.01
        self.Fascicles=True
        '''
"berg@ice.ufjf.br"
] | berg@ice.ufjf.br |
3dd7912d492a6bdcb9818d1262ea0f835f4d0457 | 79b19ce0ae1c305cbcc70efc1ddb3787fff6a211 | /reax/out2garf.py | bbc838072a16fde95366020a08f1222b263933dd | [
"MIT"
] | permissive | hsulab/DailyScripts | 0e35dcbe103c439045b520987fa808f286cf27a6 | 55c0c269b9382bbb5bd9a07e91bb0683351c077a | refs/heads/master | 2022-05-06T14:02:02.727633 | 2022-04-21T13:14:42 | 2022-04-21T13:14:42 | 244,178,239 | 3 | 0 | MIT | 2020-10-01T20:27:10 | 2020-03-01T15:55:58 | Python | UTF-8 | Python | false | false | 8,881 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import time
import argparse
import numpy as np
from scipy import integrate
# Short aliases for frequently used numpy routines.
pi = np.pi
norm = np.linalg.norm
inv = np.linalg.inv
dot = np.dot
cross = np.cross
arccos = np.arccos
# Shown by argparse as the program description (--help).
description=r"""
Author: Jiayan Xu, jxu15@qub.ac.uk
"""
def read_outcar(outcar='OUTCAR', natoms=100, nframes=1000, wdat=False):
    """Parse positions, forces and total energies from a VASP OUTCAR.

    :param outcar: path to the OUTCAR file
    :param natoms: number of atoms per ionic step
    :param nframes: stop after this many energy blocks
    :param wdat: unused placeholder (the per-atom .dat output was disabled)
    :return: (frames, energies) where frames is a list of
        (positions, forces) float arrays of shape (natoms, 3) and
        energies is a 1-D float array
    """
    # check file existence
    if not os.path.exists(outcar):
        raise ValueError('%s doesnot exist.' % outcar)

    frames = []
    energies = []
    count = 0
    with open(outcar, 'r') as fopen:
        while count < nframes:
            line = fopen.readline()
            # Bug fix: readline() returns '' at EOF; the original looped
            # forever when the file held fewer than nframes steps.
            if not line:
                break
            if line.startswith(' POSITION'):
                fopen.readline()  # segment line ---...---
                poses, forces = [], []
                for n in range(natoms):
                    data = fopen.readline().strip().split()
                    poses.append(data[:3])   # x y z
                    forces.append(data[3:])  # fx fy fz
                frames.append((np.array(poses, dtype=float),
                               np.array(forces, dtype=float)))
            if line.startswith(' FREE ENERGIE'):
                fopen.readline()  # segment line ---...---
                fopen.readline()  # free energy TOTEN
                fopen.readline()  # blank line
                data = fopen.readline()
                # Last token of the 4th line after the header is the energy.
                energies.append(data.strip().split()[-1])
                count += 1
    print('Successfully read %s, get positions, forces, energies ...' %outcar)
    energies = np.array(energies, dtype=float)
    return frames, energies
def read_poscar(poscar='POSCAR', format='vasp5'):
    """Parse a VASP5 POSCAR file into its components."""
    if not os.path.exists(poscar):
        raise ValueError('%s doesnot exist.' % poscar)

    # Tokenise every line up front.
    with open(poscar, 'r') as reader:
        tokens = [line.strip().split() for line in reader.readlines()]

    fname = ' '.join(tokens[0])                   # file description
    scaling = float(tokens[1][0])                 # global scaling factor
    lattice = np.array(tokens[2:5], dtype=float)  # 3x3 cell vectors
    # VASP4 puts the atom counts directly on line 6; require VASP5 symbols.
    if tokens[5][0].isdigit():
        raise ValueError('Please use VASP5 POSCAR format.')
    symbols = tokens[5]
    numbers = [int(i) for i in tokens[6]]
    natoms = np.sum(numbers)
    dyntype = ' '.join(tokens[7])                 # dynamic type
    coorsys = tokens[8]                           # coordinate system
    # Split each coordinate row into position (x y z) and selective flags.
    coord_rows = tokens[9:9 + natoms]
    poses = np.array([row[:3] for row in coord_rows], dtype=float)
    fixes = [row[3:] for row in coord_rows]

    # Expand the per-species counts into one symbol per atom.
    atoms = []
    for symbol, number in zip(symbols, numbers):
        atoms.extend([symbol] * number)

    print('Successfully read %s ...' % poscar)
    return fname, scaling, lattice, symbols, numbers, atoms, poses, fixes
def write_biogrf(name, lx, ly, lz, alpha, beta, gamma, atoms, coords):
    """Generate a lammps-like bgf file, returned as one string.

    Trailing comments give the fortran formats of the original records.
    """
    header = [
        'BIOGRF 200\n',                      # FILTYP (A6,I5)
        'DESCRP ' + name + '\n',             # ('DESCRP',1X,A8)
        'REMARK generated by pos2bgf.py\n',  # ('REMARK',1X,A)
        # ('FORCEFIELD',1X,A8)
        # ('PERIOD',1X,3I1)
        'AXES xyz\n',                        # ('AXES',3X,A)
        ('CRYSTX ' + 6 * '{:>11.5f}' + '\n').format(
            lx, ly, lz, alpha, beta, gamma),  # ('CRYSTX',1X,6F11.5)
    ]

    # constraint
    #content += 'BOND RESTRAINT 1 2 0.50 7500.00 7500.00 0.0000000 0 0\n'

    # ('ATOM'|'HETATM',1X,I5,1X,A5,1X,A3,1X,A1,1X,A5,3F10.5,1X,A5,I3,I2,1X,F8.5)
    atom_format = ('HETATM {:>5d} {:>5s} {:>3s} {:>1s} {:>5s}'
                   + 3 * '{:>10.5f}' + ' {:>5s}{:>3d}{:>2d} {:>8.5f}\n')
    body = [atom_format.format(i + 1, atom, '', '', '', *coord, atom, 0, 0, 0)
            for i, (atom, coord) in enumerate(zip(atoms, coords))]

    return ''.join(header) + ''.join(body) + 'END\n'
def calc_trans(lattice):
    """Compute lammps-style cell parameters for a 3x3 lattice.

    Returns (lx, ly, lz, alpha, beta, gamma, trans_matrix): edge lengths in
    angstrom, cell angles in degrees, and the matrix mapping cartesian
    coordinates into the lammps-oriented frame.
    """
    a, b, c = lattice
    # Edge lengths (angstrom).
    lx, ly, lz = np.linalg.norm(a), np.linalg.norm(b), np.linalg.norm(c)
    # Angles between the cell vectors, in degrees.
    alpha = np.arccos(np.dot(b, c) / ly / lz) / np.pi * 180
    beta = np.arccos(np.dot(a, c) / lx / lz) / np.pi * 180
    gamma = np.arccos(np.dot(a, b) / lx / ly) / np.pi * 180
    # Cell volume via the scalar triple product.
    vol = np.dot(a, np.cross(b, c))
    # Lammps-oriented lattice: `a` along x, `b` in the xy plane.
    # NOTE(review): the third row keeps only lz (no xz/yz tilt), so this is
    # exact only when `c` is orthogonal to the ab plane -- TODO confirm.
    lat_lmp = np.array([
        [lx, 0, 0],
        [ly * np.cos(gamma / 180 * np.pi), ly * np.sin(gamma / 180 * np.pi), 0],
        [0, 0, lz],
    ])
    # Transformation from the original cartesian frame into the lammps frame.
    trans_matrix = 1 / vol * np.dot(
        lat_lmp.T, [np.cross(b, c), np.cross(c, a), np.cross(a, b)])
    return lx, ly, lz, alpha, beta, gamma, trans_matrix
def adjust_poses(poses, refposes):
    """Unwrap periodic jumps in fractional (direct) coordinates.

    Each coordinate is shifted by a whole number of cells so it stays
    within 0.5 of the matching reference coordinate.  Mutates *poses* in
    place and returns (poses, copy-of-poses) so the result can serve as
    the reference for the next frame.
    """
    for i in range(len(poses)):
        for axis in range(3):
            # Whole-cell displacement relative to the reference.
            jump = round(poses[i][axis] - refposes[i][axis], 0)
            poses[i][axis] -= jump
    return poses, poses.copy()
def write_trainsetin(names, energies, refname):
    """Build the ReaxFF trainset.in ENERGY section.

    Each sample contributes one line of the form
    "1.0 + <name>/1 - <refname>/1 <energy>"; input energies are in eV and
    converted to kcal/mol before formatting.
    """
    EVTOKCAM = 23.061  # eV -> kcal/mol conversion factor
    sample_format = '1.0 + {:>12s}/1 - {:>12s}/1 {:>12.4f}\n'
    lines = ['ENERGY 100.0\n',
             '# weight / structure / reference / DFT in kcal/mol\n']
    lines.extend(sample_format.format(name, refname, energy * EVTOKCAM)
                 for name, energy in zip(names, energies))
    lines.append('ENDENERGY')
    return ''.join(lines)
def out2garf(outcar='OUTCAR', poscar='POSCAR', nframes=100, intv=5,
             refstructure=('POSCAR', '0.0'), samplename='POSCAR'):
    """Convert a VASP MD run into ReaxFF training inputs.

    Reads the cell/atoms from *poscar* and up to *nframes* frames from
    *outcar*, keeps every *intv*-th frame, and writes two files into the
    current directory:

    * ``geo``         -- biograf geometries, reference structure first
    * ``trainset.in`` -- ENERGY section, energies relative to the reference

    :param refstructure: (reference POSCAR path, reference energy in eV).
        Bug fix: the default was a mutable list; a tuple avoids the
        shared-mutable-default pitfall while remaining index-compatible.
    :param samplename: prefix used to name the generated structures.
    """
    # read POSCAR for the cell and atom bookkeeping
    fname, scaling, lattice, \
            symbols, numbers, atoms, refpos, fixes = read_poscar(poscar)
    natoms = np.sum(numbers)
    lx, ly, lz, alpha, beta, gamma, trans_matrix = calc_trans(lattice)

    # read OUTCAR
    frames, energies = read_outcar(outcar, natoms, nframes)

    # unwrap positions frame by frame (each frame references the previous
    # one through refpos) and convert to cartesian coordinates
    cartposes = []
    for frame in frames:
        dirpos = dot(frame[0], inv(lattice.T))
        dirpos, refpos = adjust_poses(dirpos, refpos)
        cartposes.append(dot(dirpos, lattice))
    cartposes = np.array(cartposes)

    # write geo: keep every intv-th frame
    names = []
    geo_content = ''
    for i, cartpos in enumerate(cartposes):
        if i % intv == 0:
            name = samplename + '_' + str(i+1).zfill(4)
            coords = (dot(trans_matrix, cartpos.T)).T
            geo_content += write_biogrf(name, lx, ly, lz,
                    alpha, beta, gamma, atoms, coords) + '\n'
            names.append(name)

    # reference structure: (POSCAR path, energy in eV)
    refposcar, refenergy = refstructure[0], float(refstructure[1])
    if not (isinstance(refposcar, str) and isinstance(refenergy, float)):
        raise ValueError('First must be POSCAR path and second must be energy.')
    fname, scaling, lattice, \
            symbols, numbers, atoms, refpos, fixes = read_poscar(refposcar)
    lx, ly, lz, alpha, beta, gamma, trans_matrix = calc_trans(lattice)

    refname = samplename + '_REF'
    cartpos = dot(refpos, lattice)
    coords = (dot(trans_matrix, cartpos.T)).T
    ref_content = write_biogrf(refname, lx, ly, lz,
            alpha, beta, gamma, atoms, coords)
    geo_content = ref_content + '\n' + geo_content
    with open('geo', 'w') as writer:
        writer.write(geo_content)
    print('Successfully write geo ...')

    # write trainset.in with energies relative to the reference
    energies = energies - refenergy
    tsi_content = write_trainsetin(names, energies, refname)
    with open('trainset.in', 'w') as writer:
        writer.write(tsi_content)
    print('Successfully write trainset.in ...')
# Command-line entry point: parse the run parameters and hand them to
# out2garf().  --nframes, --refstr and --samplename are mandatory.
if __name__ == '__main__':
    # args
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-v', '--version', \
            action='version', version='%(prog)s 0.1')
    parser.add_argument('-o', '--outcar', nargs='?',\
            default='OUTCAR', help='OUTCAR')
    parser.add_argument('-p', '--poscar', nargs='?',\
            default='POSCAR', help='POSCAR')
    parser.add_argument('-nf', '--nframes', required=True,\
            type=int, help='Number of Frames')
    parser.add_argument('-i', '--interval', \
            type=int, default=10, help='Selection Interval')
    parser.add_argument('-r', '--refstr', nargs=2, required=True,\
            help='Reference Structure')
    parser.add_argument('-sn', '--samplename', required=True,\
            help='Sample Name')
    args = parser.parse_args()
    # Example invocation kept for reference:
    #out2garf(outcar='OUTCAR', poscar='POSCAR', nframes=5000, intv=500,\
    #    refstructure=['POSCAR', '-5000.0'], samplename='CO2ad')
    out2garf(args.outcar, args.poscar, args.nframes, args.interval,\
            args.refstr, args.samplename)
| [
"ahcigar@foxmail.com"
] | ahcigar@foxmail.com |
8cc06657b6869b3435b2d98c650dc7996905f496 | 8644a2174c3cb7ccfe211a5e49edffbcc3a74a46 | /Learning/Algorithms/DevideAndConquer/longest_com_prefix.py | eafc03411d5fb9a2d33de218b11db52537c54464 | [] | no_license | bhavya2403/Learning-Python | 9e7cc9dee21172321fb217cae27c8072357f71ce | 3898211b357fbab320010a82a4811b68611d0422 | refs/heads/main | 2023-03-24T03:19:49.989965 | 2021-03-22T20:11:04 | 2021-03-22T20:11:04 | 315,962,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 537 | py | def combineTwo(strA, strB):
    """Return the longest common prefix shared by strA and strB."""
    i = 0
    m, n = len(strA), len(strB)
    # Walk both strings while their characters agree.
    while i < min(m, n):
        if strB[i] != strA[i]:
            if not i:
                # Mismatch at position 0: no common prefix at all.
                return ''
            return strB[:i]
        i += 1
    # One string is entirely a prefix of the other.
    return strB[:i]
def longestCommonPrefix(arr, l, r):
    """Divide-and-conquer longest common prefix of arr[l..r] (inclusive)."""
    # Base case: a single string is its own prefix.
    if l == r:
        return arr[l]
    mid = (l + r) // 2
    left_prefix = longestCommonPrefix(arr, l, mid)
    right_prefix = longestCommonPrefix(arr, mid + 1, r)
    # Merge step: the common prefix of the two halves' prefixes.
    return combineTwo(left_prefix, right_prefix)
print(longestCommonPrefix(["geeksforgeeks", "geeks", "geek", "geezer"], 0, 3)) | [
"noreply@github.com"
] | bhavya2403.noreply@github.com |
0ce215260f26c84f7bd0381d434be95578624498 | e621a2e763709336894bb33623cf6d20d7858c6f | /Stepwise.py | 373650f95dae4d0688a030c7efa06ead06e3cbae | [] | no_license | myliu/document-classification | 49d688fe0b5fdd79a6d2fca36b78277d273762cd | 7b9078912a4fd770ea614660ec770ccbc23bfdd1 | refs/heads/master | 2021-01-01T05:38:25.318323 | 2013-09-16T01:22:53 | 2013-09-16T01:22:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,586 | py | ## author: Mingyu Liu
## author: Shi He
import numpy as np
import string
from Dataset import *
def main():
    # Greedy forward stepwise feature selection for ridge-regression text
    # classification (hockey vs. baseball newsgroups).  Python 2 code.
    d = Dataset("rec.sport.hockey.txt", "rec.sport.baseball.txt", cutoff=200)
    (Xtrain, Ytrain, Xtest, Ytest) = d.getTrainAndTestSets(0.8, seed=100)
    lam = 100
    cols = []
    currentError = 1
    n = Xtrain.shape[1]
    # dic maps selected feature index -> training-error improvement it gave.
    dic = {}
    ## i is the number of features to be added to cols
    for i in range(40):
        bestJ = 0
        bestErrorRate = 1
        # Try every candidate column appended to the current selection.
        # NOTE(review): columns already in `cols` are re-tried each round;
        # they give no improvement so they are never re-selected, but the
        # inner loop does redundant work.
        for j in range(n):
            cols.append(j)
            w = trainRidge(Xtrain[:, cols], Ytrain, lam)
            errorRate = computeError(Xtrain[:, cols], Ytrain, w)
            if errorRate < bestErrorRate:
                bestJ = j
                bestErrorRate = errorRate
            ## print 'Best error rate is ' + str(bestErrorRate)
            cols.pop()
        # Stop when no candidate strictly improves the training error.
        if bestErrorRate >= currentError:
            break
        else:
            cols.append(bestJ)
            dic[bestJ] = currentError - bestErrorRate
            currentError = bestErrorRate
            print 'Current error rate is ' + str(currentError)
    # Refit on the final selection and report train/test error.
    w = trainRidge(Xtrain[:, cols], Ytrain, lam)
    trainError = computeError(Xtrain[:, cols], Ytrain, w)
    print 'Train error rate is ' + str(trainError)
    testError = computeError(Xtest[:, cols], Ytest, w)
    print 'Test error rate is ' + str(testError)
    ## find the top 10 features
    wordList = d.getWordList()
    # Sort selected features by the error improvement they contributed.
    topCols = [(key, value) for key, value in sorted(dic.iteritems(), key = lambda(k, v) : (v, k), reverse = True)]
    topCols = topCols[: 10]
    topFeatures = [wordList[index] for (index, value) in topCols]
    for f in topFeatures:
        print f
def trainRidge(Xtrain, Ytrain, lam):
    """Solve ridge regression: w = (X'X + lam*I)^-1 X'Y (returned as np.matrix)."""
    X = np.asmatrix(Xtrain)
    Y = np.asmatrix(Ytrain)
    # Regularized Gram matrix; lam shrinks the weights toward zero.
    regularized_gram = X.T * X + lam * np.eye(X.shape[1])
    return np.linalg.inv(regularized_gram) * X.T * Y
def computeError(Xtest, Ytest, w):
    """Return the fraction of rows whose sign(x*w) disagrees with the +/-1 label."""
    wrong = 0
    total = Ytest.size
    for idx in range(total):
        # Score > 0 predicts class +1, otherwise class -1.
        prediction = 1 if Xtest[idx] * w > 0 else -1
        if prediction != Ytest[idx]:
            wrong += 1
    # Float division (works under both Python 2 and 3).
    return wrong * 1.0 / total
# Script entry point: run the stepwise-selection experiment.
if __name__ == "__main__":
    main()
| [
"mliu@quantcast.com"
] | mliu@quantcast.com |
3260d1f7fbafdfc7446d26ae7c1e39ae12844548 | 2f418a0f2fcca40f84ec0863b31ff974b574350c | /scripts/addons_extern/sound_drivers/driver_panels.py | 1c049ce3d52ed61492e5cc8e6487450af216c921 | [] | no_license | JT-a/blenderpython279 | 57a81b55564218f3b1417c2ffa97f5161897ec79 | 04846c82f794c22f87d677d9eb8cec1d05c48cda | refs/heads/master | 2021-06-25T06:58:07.670613 | 2017-09-11T11:14:36 | 2017-09-11T11:14:36 | 103,723,697 | 4 | 2 | null | 2017-09-16T04:09:31 | 2017-09-16T04:09:31 | null | UTF-8 | Python | false | false | 14,742 | py | bl_info = {
"name": "Driver Panels",
"author": "batFINGER",
"location": "Properties > Speaker > MIDI",
"description": "Display Drivers in TOOLS and PROPERTIES ",
"warning": "Still in Testing",
"wiki_url": "http://wiki.blender.org/index.php/\
User:BatFINGER/Addons/Sound_Drivers",
"version": (1, 0),
"blender": (2, 7, 6),
"tracker_url": "",
"icon": 'DRIVER',
"support": 'TESTING',
"category": "Animation",
}
import bpy
from bpy.utils import register_class, unregister_class
from bpy.props import BoolProperty
from sound_drivers.utils import get_icon, bpy_collections, icon_from_bpy_datapath
class DRIVER_UL_driven_objects(bpy.types.UIList):
    """UIList of driven datablocks, filterable by name/index, sorted by name.

    Fixes: removed unreachable statements after ``return`` in ``draw_item``
    (they referenced an undefined name ``col``), dropped the unused
    ``ob = data`` local, and corrected the "filetereer" label typo.
    """

    # Filter options shown in the UIList's filter popup.
    use_filter_empty = BoolProperty(name="Filter Empty", default=False, options=set(),
                                    description="Whether to filter empty vertex groups")
    use_filter_empty_reverse = BoolProperty(name="Reverse Empty", default=False, options=set(),
                                            description="Reverse empty filtering")
    use_filter_name_reverse = BoolProperty(name="Reverse Name", default=False, options=set(),
                                           description="Reverse name filtering")
    use_filter_name = BoolProperty(name="Object Name", default=True, options=set(),
                                   description="Reverse name filtering")

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, filter_flg):
        """Draw one row: an icon for the driven datablock plus its name."""
        self.use_filter_sort_alpha = True
        # active_propname is e.g. "active_objects_index"; str.strip() with the
        # "active_index" charset happens to leave the bare collection name.
        # NOTE(review): fragile -- relies on collection names not starting or
        # ending with any character from "active_index"; confirm for new
        # collections before reusing.
        coll = active_propname.strip("active_index")
        collection = getattr(bpy.data, coll)
        obj = collection.get(item.name)
        icon = get_icon(obj.type) if hasattr(obj, "type") else icon_from_bpy_datapath(repr(obj))
        if self.layout_type in {'DEFAULT', 'COMPACT'}:
            layout.label(" %s" % (item.name), icon=icon)
        elif self.layout_type in {'GRID'}:
            layout.alignment = 'CENTER'
            layout.label(text="", icon=icon)

    def draw_filter(self, context, layout):
        """Draw the filter popup: name search field plus the name-filter toggle."""
        layout.label("Filter")
        layout.prop(self, "filter_name")
        layout.prop(self, "use_filter_name")

    def filter_items(self, context, data, propname):
        """Return (flags, new_order): substring filter on name/1-based index, alpha sort."""
        col = getattr(data, propname)
        filter_name = self.filter_name.lower()
        # Keep an item when the filter string occurs in its 1-based index or
        # (case-insensitively) in its name.
        flt_flags = [self.bitflag_filter_item if any(
            filter_name in filter_set for filter_set in (
                str(i), item.name.lower()
            )
        )
            else 0 for i, item in enumerate(col, 1)
        ]
        if self.use_filter_sort_alpha:
            # Map each original position to its rank in name-sorted order.
            flt_neworder = [x[1] for x in sorted(
                zip(
                    [x[0] for x in sorted(enumerate(col), key=lambda x: (x[1].name))],
                    range(len(col))
                )
            )
            ]
        else:
            flt_neworder = []
        return flt_flags, flt_neworder
class DriversManagerPanel(bpy.types.Panel):
    """Tool-shelf panel summarising all drivers known to the DriverManager.

    Fixes: removed the large unreachable region after ``return None`` in
    ``draw`` (already marked "FIXME this can all go" by the author) and the
    "Mangager" typo; dropped unused locals in ``draw_header``.
    """
    bl_label = "Driver Manager"
    bl_idname = "VIEW3D_PT_DriversManager"
    bl_space_type = "VIEW_3D"
    bl_region_type = "TOOLS"
    bl_category = 'Drivers'

    @classmethod
    def poll(cls, context):
        # Visible when no manager exists yet (so the user can create one),
        # or when the manager is tracking at least one driver.
        dm = bpy.app.driver_namespace.get("DriverManager")
        if not dm:
            return True
        return bool(len(dm.all_drivers_list))

    @classmethod
    def idchange(cls, s):
        """Rebind this panel's bl_idname (used when panels are re-registered)."""
        cls.bl_idname = s

    def draw_header(self, context):
        # Delegate the header menus to the manager, if one exists.
        dm = bpy.app.driver_namespace.get("DriverManager")
        if dm is not None:
            dm.draw_menus(self, context)

    def draw(self, context):
        layout = self.layout
        dns = bpy.app.driver_namespace
        box = layout
        dm = dns.get("DriverManager")
        UserPrefs = context.user_preferences
        # Drivers only evaluate when auto-execution of scripts is enabled.
        if not UserPrefs.system.use_scripts_auto_execute:
            row = layout.row()
            row.prop(UserPrefs.system, "use_scripts_auto_execute")
            row = layout.row()
            row.label("Warning Will not work unless Auto Scripts Enabled",
                      icon='ERROR')
            return
        # No manager yet: offer the operator that creates one.
        if dm is None:
            box.label("No Driver Manager", icon='INFO')
            row = box.row()
            row.operator("drivermanager.update")
            row = box.row()
            row.label("Once enabled will poll on drivers")
            return
        row = box.row(align=True)
        if not len(dm._all_drivers_list):
            box.label("NO DRIVERS FOUND", icon='INFO')
            return
        dm.draw_filters(self, context)
        return None
class DriverPanel(bpy.types.Panel):
    """Common base for the per-collection driver panels in the 3D View tool shelf."""
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'TOOLS'
    bl_category = "Drivers"
    bl_label = " "

    def dic(self):
        """Return the driver dict for this panel's collection.

        Bug fix: the body referenced a bare, undefined ``context`` (NameError
        when called); it now uses ``bpy.context``.  ``self.collection`` is
        presumably injected by the dynamic subclassing in register() --
        confirm before calling on the base class itself.
        """
        return bpy.context.driver_manager.get_collection_dic(self.collection)
class EditDriverPanel(DriverPanel):
    """Panel for the driver currently being edited.

    Currently disabled: ``poll`` always returns False.  Fixes: removed the
    unreachable statements after ``return False`` and defined ``ed`` in
    ``draw`` (it was previously an undefined name that would raise
    NameError if the panel were ever drawn).
    """

    @classmethod
    def poll(cls, context):
        # Intentionally disabled; the original (now removed, unreachable)
        # condition was: driver_manager exists and has an edit_driver.
        return False

    def draw_header(self, context):
        layout = self.layout
        dm = context.driver_manager
        ed = dm.edit_driver
        op = layout.operator("driver.edit", text="EDIT DRIVER", icon='CANCEL')
        op.dindex = ed.index
        op.toggle = True
        layout.prop(ed.gui, "gui_types")

    def draw(self, context):
        layout = self.layout
        dm = context.driver_manager
        ed = dm.edit_driver  # was an undefined name here (bug fix)
        if ed is not None:
            dm.draw_layout(layout, context, dm.get_object_dic("xxx", "yyy"))
        dm.driver_edit_draw(layout, context)
        return None
class DriverCollectionPanel(DriverPanel):
    """Creates a Panel in the scene context of the properties editor"""
    bl_label = " "
    # NOTE(review): `self.collection` is presumably injected by the dynamic
    # type() subclassing in register() -- this base class has no such attr.
    def context(self, context, obj):
        '''
        REFACTOR TO RETURN ob, keys
        '''
        # Decide whether `obj` is the relevant datablock for this collection.
        # NOTE(review): the "materials" branch is unreachable -- the first
        # test already returns True for every non objects/scenes collection.
        if self.collection not in ["objects", "scenes"]:
            return True
        elif self.collection in ["scenes"]:
            return obj == context.scene
        elif self.collection in ["objects"]:
            return context.object
        elif self.collection in ["materials"]:
            return context.object.active_material
    '''
    @classmethod
    def unregister(cls):
        print("URC", cls.bl_idname)
        bpy.utils.unregister_class(cls)
    '''
    @classmethod
    def poll(cls, context):
        # Visible only when this collection's filter is selected and the
        # manager actually has drivers for it.
        scene = context.scene
        dm = context.driver_manager
        do = getattr(scene, "driver_objects", None)
        if do is None or dm is None:
            return False
        #return True
        return(do.use_filters == getattr(do.filters, cls.collection)
               and len(dm.get_collection_dic(cls.collection)))
    def draw_header(self, context):
        #self.layout.prop(context.scene, "use_gravity", text="")
        collection = getattr(self, "collection", "objects")
        self.layout.label(icon=get_icon(collection), text=collection.title())
    def draw(self, context):
        layout = self.layout
        collection = self.collection
        scene = context.scene
        wm = context.window_manager
        dm = context.driver_manager
        # Per-collection UI state stored on scene.driver_objects.
        search = getattr(scene.driver_objects, "search_%s" % self.collection, False)
        index = getattr(scene.driver_objects, "active_%s_index" % self.collection, -1)
        coll = getattr(scene.driver_objects, collection)
        layout.prop(scene.driver_objects, "search_%s" % self.collection)
        # In search mode, show the filterable UIList registered for this
        # collection ("SD_<collection>_ui_list", created in register()).
        if getattr(scene.driver_objects, "search_%s" % self.collection):
            ui = layout.template_list("SD_%s_ui_list" % self.collection,
                "", scene.driver_objects,
                self.collection, scene.driver_objects,
                "active_%s_index" % self.collection, self.collection)
        # get_driven_scene_objects(scene)
        # Resolve the datablock whose drivers are shown, per collection kind.
        if self.collection.startswith("ob"):
            #dic = dm.get_driven_scene_objects(scene)
            ob = coll[index] if search else context.object
            keys = [ob.name] if ob is not None else []
        elif self.collection.startswith("sce"):
            ob = context.scene
            keys = [ob.name]
        elif self.collection.startswith("mat"):
            ob = context.object.active_material
            keys = [ob.name] if ob is not None else []
        elif self.collection.startswith("shape_keys"):
            ob = getattr(context.object.data, "shape_keys", None)
            keys = [ob.name] if ob is not None else []
        else:
            #dic = dm.get_collection_dic(type(self).collection)
            ob = coll[index]
            keys = [ob.name]
        #keys = sorted(dic.keys())
        if ob is None:
            layout.label("NO CONTEXT OBJECT")
            return None
        # Drop drivers whose datablocks were deleted, then fetch this one's.
        dm.check_deleted_drivers()
        dic = dm.get_object_dic(collection, ob.name)
        obj = getattr(bpy.data, collection).get(ob.name)
        # TO BE REFACTORED
        col = layout.column(align=True)
        icon = get_icon(obj.type) if hasattr(obj, "type") else icon_from_bpy_datapath(repr(obj))
        row = col.row()
        row.alignment = 'LEFT'
        row.label(icon='DISCLOSURE_TRI_RIGHT', text="")
        if not search and collection.startswith("obj"):
            row.template_ID(context.scene.objects, "active")
        else:
            row.label(icon=icon, text=ob.name)
        #obj = dm.find(dic[m][0][0]).driven_object.id_data '''
        dm.draw_layout(col, context, dic)
        #dm.check_added_drivers(ob)
        return None
def register():
    # Register the two static panels first.
    register_class(DriversManagerPanel)
    register_class(EditDriverPanel)
    propdic = {
    "bl_space_type" : 'VIEW_3D',
    "bl_region_type" : 'TOOLS',
    "bl_category" : "Drivers",
    }
    # Generate one panel + one UIList subclass per driven collection,
    # named "SD_<collection>_Panel" / "SD_<collection>_ui_list"; the panel
    # subclass also receives a `collection` attribute via propdic.
    for collection in bpy_collections:
        propdic["collection"] = collection
        bl_idname = "SD_%s_Panel" % collection
        propdic["bl_idname"] = bl_idname
        col_ui_list = type("SD_%s_ui_list" % collection, (DRIVER_UL_driven_objects,), {})
        x = type(bl_idname, (DriverCollectionPanel,), propdic)
        register_class(x)
        register_class(col_ui_list)
def get_dm(self):
    # Property getter: look up the shared DriverManager instance, if any.
    dns = bpy.app.driver_namespace
    return dns.get("DriverManager")
# Expose the manager as a read-only property on every Context.
bpy.types.Context.driver_manager = property(get_dm)
def unregister():
    # should have done this earlier
    unregister_class(DriversManagerPanel)
    # Tear down the per-collection panels/UILists created in register().
    for c in bpy_collections:
        unregister_class(getattr(bpy.types, "SD_%s_Panel" % c))
        unregister_class(getattr(bpy.types, "SD_%s_ui_list" % c))
    #bpy.utils.unregister_class(DriverCollectionPanel)
    unregister_class(EditDriverPanel)
    # Need to remove all of these
    del bpy.types.Context.driver_manager
| [
"meta.androcto1@gmail.com"
] | meta.androcto1@gmail.com |
92e82e328ade7f03df9e0af8ba121385d3be56e6 | 98dde5ccdb145de9aab3e7233c3ec6c9c13a0649 | /controller/qt_classes/LineEditDelegate.py | eaedc896fc950ae5c89c31e9713ccb06d136e5a0 | [] | no_license | teamlm2/lm2 | f586aaf7af44cbb64964f2c2bfeffa3e902d4752 | 416cc189b6fc16bf61583891783eef7e4a9e1278 | refs/heads/master | 2018-12-22T18:38:19.360889 | 2018-10-04T02:12:11 | 2018-10-04T02:12:11 | 109,807,271 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,246 | py | __author__ = 'anna'
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class LineEditDelegate(QStyledItemDelegate):
    """Delegate that edits one table column with an auto-completing QLineEdit."""
    def __init__(self, column, completer_value_list, parent):
        super(LineEditDelegate, self).__init__(parent)
        # Only cells in this column get the line-edit editor.
        self.line_edit_column = column
        # Completion candidates, filtered through a proxy as the user types.
        self.completer_model = QStringListModel(completer_value_list)
        self.completer_proxy_model = QSortFilterProxyModel()
        self.completer_proxy_model.setSourceModel(self.completer_model)
        self.completer = QCompleter(self.completer_proxy_model, self, activated=self.on_completer_activated)
        self.completer.setCompletionMode(QCompleter.PopupCompletion)
        self.parent = parent
    def createEditor(self, widget, item, index):
        # Implicitly returns None for other columns / invalid indexes, which
        # tells Qt to fall back to the default editor.
        if not index is None:
            if index.isValid():
                if index.column() == self.line_edit_column:
                    editor = QLineEdit(widget)
                    editor.setCompleter(self.completer)
                    # Narrow the completion list to the typed prefix.
                    editor.textEdited.connect(self.completer_proxy_model.setFilterFixedString)
                    return editor
    @pyqtSlot(str)
    def on_completer_activated(self, text):
        # Re-emit the chosen completion on the completer's typed signal.
        if not text:
            return
        self.completer.activated[str].emit(text) | [
"aagii_csms@yahoo.com"
] | aagii_csms@yahoo.com |
6dbec1025dda199acee235c6639074adb8892917 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/rna-transcription/ea888a3beaac408f9f93b24fd31d9d7a.py | afffcc7b61868c9975cd535c55505f2650b87b53 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 490 | py | class DNA:
dna_sequence = ''
    def __init__(self, value):
        # Store the DNA string to be transcribed by to_rna().
        self.dna_sequence = value
def to_rna(self):
new_sequence = ''
for letter in self.dna_sequence:
if letter == 'G':
new_sequence += 'C'
elif letter == 'C':
new_sequence += 'G'
elif letter == 'T':
new_sequence += 'A'
elif letter == 'A':
new_sequence += 'U'
return new_sequence
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
cd676e15a7d03535de5ff911140dc2d8ab4aa212 | ed1a4b2dba31905ccac09136a693c2d5c7697de8 | /helpers/create_user_csv.py | e397736e82540a5b553663cbada881455e949a72 | [] | no_license | twkampala/dhis2-config-exercise | 5580666cebcdf5c65cbb81174670d87266e98c8a | 76f835ea36ec6df64c04d6a207ef09176161843b | refs/heads/master | 2021-01-01T16:06:12.658622 | 2015-07-08T03:49:09 | 2015-07-08T03:49:09 | 38,424,051 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 723 | py | from faker import Faker
def decide_role(n, roles):
    """Alternate roles: roles[0] for even n, roles[1] for odd n."""
    # The original tested (n + 1) % 2 == 0, which is simply "n is odd".
    return roles[n % 2]
def create_csv_file(path, number_of_users):
    """Write a header plus `number_of_users` fake user rows to a CSV at `path`.

    Bug fixes: Faker's API method is ``user_name()`` -- ``username()`` does
    not exist and raised AttributeError.  Rows are now emitted through the
    csv module so commas in generated values (e.g. names) cannot corrupt
    the file; field order and content are unchanged.
    """
    import csv  # local import: keeps the module's top-level imports untouched

    f = Faker()
    roles = ['ordinary user', 'admin user']
    default_password = "secretPassword"
    with open(path, "w", newline="") as file_handle:
        writer = csv.writer(file_handle)
        writer.writerow(["First Name", "Last Name", "Username",
                         "Email Address", "Role", "Password"])
        for n in range(number_of_users):
            # Role alternates per row via decide_role(); password is fixed.
            writer.writerow([f.first_name(), f.last_name(), f.user_name(),
                             f.email(), decide_role(n, roles),
                             default_password])
# Script entry point: generate a small sample file with three users.
if __name__ == "__main__":
    create_csv_file("users.csv", 3)
| [
"jmured@gmail.com"
] | jmured@gmail.com |
6aa7b9e0e5cd9dc45f0c134f9da1ce1c4e431b5d | bcdf30ab17d406643fb8ec01bafcd6cbf625bb44 | /product_brand_pricelist/__openerp__.py | 14df672457ad1d4cab78b820f20afd4fcb8344fe | [] | no_license | more2makeTim/odoo-extra-addons | a37915da3407b38cf3fcfbdbecb67435cb7e8f76 | ac81232e4d360d8cd645b2d3471da8779d77a4a5 | refs/heads/8.0 | 2020-04-21T17:56:16.337747 | 2018-07-06T10:42:48 | 2018-07-06T10:42:48 | 94,529,482 | 0 | 0 | null | 2017-06-16T09:39:22 | 2017-06-16T09:39:22 | null | UTF-8 | Python | false | false | 1,303 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2015 ERP|OPEN (www.erpopen.nl).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo / OpenERP 8 module manifest (__openerp__.py): a bare dict read by the
# addon loader; 'depends' lists modules that must be installed first and
# 'data' lists the XML views loaded on install/update.
{
    'name': 'Product Brand Pricelist',
    'author': 'ERP|OPEN, André Schenkels',
    'version': '8.0.1.0.0',
    'website': 'www.erpopen.nl',
    'license': 'AGPL-3',
    'category': 'Product',
    'depends': [
        'product',
        'product_brand'
    ],
    'data': [
        'views/product_pricelist_item.xml',
    ]
}
| [
"a.schenkels@ictstudio.eu"
] | a.schenkels@ictstudio.eu |
494583cd122cebd875997029ca81b58f4782fc31 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/ATSWTCH2-MIB.py | 38408970de263898e851492763c790f7c84465ae | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 93,849 | py | #
# PySNMP MIB module ATSWTCH2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ATSWTCH2-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:30:48 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, NotificationType, Integer32, iso, Unsigned32, MibIdentifier, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Bits, ObjectIdentity, enterprises, NotificationType, TimeTicks, ModuleIdentity, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "NotificationType", "Integer32", "iso", "Unsigned32", "MibIdentifier", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Bits", "ObjectIdentity", "enterprises", "NotificationType", "TimeTicks", "ModuleIdentity", "IpAddress")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
class MacAddress(OctetString):
    """Fixed-size OCTET STRING textual convention: exactly 6 octets (a MAC address)."""
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
    fixedLength = 6
class BridgeId(OctetString):
    """Fixed-size OCTET STRING textual convention: exactly 8 octets."""
    # NOTE(review): presumably the BRIDGE-MIB BridgeId layout (2-octet
    # priority + 6-octet MAC) -- the generated code carries no such detail.
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8)
    fixedLength = 8
class Timeout(Integer32):
    """A bridge timer value (unconstrained Integer32).

    NOTE(review): presumably measured in hundredths of a second as in the
    standard BRIDGE-MIB Timeout convention -- not verifiable from this file.
    """
    pass
# --- OID registration tree ---
# enterprises.207 is Allied Telesyn; this MIB is rooted at
# 1.3.6.1.4.1.207.8.10 (atswitchMib).
alliedTelesyn = MibIdentifier((1, 3, 6, 1, 4, 1, 207))
atiProduct = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 1))
mibObject = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8))
atswitchMib = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10))
# Top-level functional groups under atswitchMib (sub-ids 1..13).
atswitchSysGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 1))
atswitchConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 2))
atswitchPortConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 3))
atswitchVlanConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 4))
atswitchEthernetStatsGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 5))
atswitchEthPortStatsGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 6))
atswitchFwdVlanGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 7))
atswitchTrapAttrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 8))
atswitchBridgeMib = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 9))
atswitchStaticMACGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 10))
# Note: sub-id 11 is not registered in this generated module.
atswitchPortMacAddrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 12))
atswitchDebugMallocLogGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 13))
# Sub-groups of the bridge MIB branch (9.x).
atswitchBrBase = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1))
atswitchBrStp = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2))
atswitchBrTp = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3))
# --- System group (atswitchSysGroup, ...8.10.1) ---
# Device identity, reset control, MDA slot types, and device-wide flow
# control.  Each scalar is registered, then (when loadTexts is on) given
# its STATUS and DESCRIPTION from the original ASN.1 module.
atswitchProductType = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 10))).clone(namedValues=NamedValues(("at-3726", 1), ("at-3714", 2), ("at-8124XL", 3), ("at-8118", 4), ("at-3726XL", 5), ("at-3714FXL", 6), ("at-3716XL", 7), ("other", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchProductType.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchProductType.setDescription(' This object will return Product Type. ')
atswitchEthernetPortCount = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthernetPortCount.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthernetPortCount.setDescription(' This object will return the number of 10/100 Mbps Ethernet ports on the switch. ')
# Writing switch-reset(2) reboots the device; reads always return no-reset.
atswitchReset = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("switch-no-reset", 1), ("switch-reset", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchReset.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchReset.setDescription(" Setting this object to 'switch-reset' will cause the switch to perform a hardware reset within approximately 4-6 seconds. Setting this object to 'switch-no-reset will have no effect. The value 'no-reset' will be returned whenever this object is retrieved. ")
atswitchMDA1Type = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("rj45-mii", 1), ("fiber", 2), ("none", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchMDA1Type.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchMDA1Type.setDescription(' This object returns the MDA type of the Uplink port ')
atswitchMDA2Type = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("rj45-mii", 1), ("fiber", 2), ("none", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchMDA2Type.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchMDA2Type.setDescription(" This Object is supported in 81XX product line only. This object returns the MDA type of the Uplink Port named 'B'. It returns 'none' if a 'B' MDA slot is not installed. ")
atswitchDeviceFlowControl = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchDeviceFlowControl.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchDeviceFlowControl.setDescription(' This Object is supported on 81XX products only. This object configures the Flow Control of the Device ')
# Sub-groups of the system group: software info (1.7), IP config (1.8),
# network-manager/trap destinations (1.9).
atswitchSwGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 7))
atswitchIpGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8))
atswitchNMGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 9))
# Software identity scalars (atswitchSwGroup).
atswitchSwProduct = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 7, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchSwProduct.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchSwProduct.setDescription('Identifies the software product the device is running. ')
atswitchSwVersion = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 7, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchSwVersion.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchSwVersion.setDescription(' Identifies the version number of the present release. ')
# IP configuration scalars (atswitchIpGroup): current vs. configured
# address, mask, gateway, DNS, and how the address was obtained.
atswitchCurrentIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchCurrentIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchCurrentIpAddress.setDescription(' The Current IP address is the one which is currently used and is obtained dynamically through one of the protocols interaction.( DHCP or Bootp.) This address is NULL if the Address is Statically configured. ')
atswitchConfiguredIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchConfiguredIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchConfiguredIpAddress.setDescription(' The Configured IP address of the device. This is the address configured through Network or Local Omega. ')
atswitchConfiguredSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchConfiguredSubnetMask.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchConfiguredSubnetMask.setDescription(' The Configured Subnet Mask of the device.')
atswitchConfiguredRouter = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchConfiguredRouter.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchConfiguredRouter.setDescription(' The Configured Gateway/Router address of the device')
atswitchIPAddressStatus = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("from-dhcp", 1), ("from-bootp", 2), ("from-psuedoip", 3), ("from-Omega", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchIPAddressStatus.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchIPAddressStatus.setDescription(' The IP Address can be obtained/configured by any of the above different ways. This object specifies how IP address currently on the switch Box, was configured/obtained. ')
atswitchDNServer = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchDNServer.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchDNServer.setDescription(' The Configured DNS Server address of the device')
atswitchDefaultDomainName = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 8, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchDefaultDomainName.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchDefaultDomainName.setDescription(' This Object defines the Default Domain where this switch can be belong to. ')
# --- Network-manager (trap destination) table (atswitchNMGroup, 1.9.1) ---
# Up to 4 NMS IP addresses to which traps are sent, indexed by
# atswitchNwMgrIndex (1..4).
atswitchNwMgrTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 9, 1), )
if mibBuilder.loadTexts: atswitchNwMgrTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchNwMgrTable.setDescription(' A list of SNMP Trap Manager stations Entries. The number of entries is given by the switchNwMgrTotal mib object. ')
atswitchNwMgrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 9, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchNwMgrIndex"))
if mibBuilder.loadTexts: atswitchNwMgrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchNwMgrEntry.setDescription(" Trap receipt Manager Entry containing ipaddress of the configured NMS's to which Traps are sent. ")
atswitchNwMgrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 9, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchNwMgrIndex.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchNwMgrIndex.setDescription(' The Index of the Managers Ip address. ')
atswitchNwMgrIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 1, 9, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchNwMgrIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchNwMgrIpAddr.setDescription(' The IP Address of the NMS host configured. ')
# --- Device configuration group (atswitchConfigGroup, ...8.10.2) ---
# Security behaviour, port mirroring, trunking, and a debug counter.
atswitchPortDisableOnSecurityViolation = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disable-on-security-voilation", 1), ("suspend-on-double-address", 2), ("security-not-yet-initalized", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortDisableOnSecurityViolation.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortDisableOnSecurityViolation.setDescription(' This System wide attribute allows the port to be disabled on Violation of the Security. or suspend the port due to Duplicate Address. Needs More explaination after somebody figures out whether atswitch needs the suspend state. ')
# Port mirroring: source port, mirror state, destination port.
atswitchMirroringSourcePort = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchMirroringSourcePort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchMirroringSourcePort.setDescription(" This is the Source port number for which there is another mirror port.If the atswitchEthConfigMirrorState is Enabled then ,the mirror portgets routed with all the packets going in and out of Source port. This arrangement is to put an RMON Probe on mirrored port to Probe the traffic on the Source port. One of the port is dedicated to this so that for any port as source port, this dedicated port can be a mirrored port. This object will return a '0' if the MirrorState is not enabled. For the AT-81XX only the receive activity can be mirrored. ")
atswitchMirrorState = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("receive", 1), ("transmit", 2), ("both", 3), ("disabled", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchMirrorState.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchMirrorState.setDescription(' if the state of Mirroring is enabled by selecting one of the first three values , then the Mirroring explained above works. If disabled, port operation works normally. No Traffic gets routed from MirroringSourcePort to Destination Mirrored Port. Mirroring of both transmit and receive activity can only be done if the port is half duplex. Choose both only if you know the port will be run in half duplex mode. ')
atswitchMirroringDestinationPort = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchMirroringDestinationPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchMirroringDestinationPort.setDescription(" This is the Destination port number for which there is another mirror port.If the atswitchEthConfigMirrorState is Enabled then ,the mirror portgets routed with all the packets going in and out of Destination port. This arrangement is to put an RMON Probe on mirrored port to Probe the traffic on the Destination port. One of the port is dedicated to this so that for any port as destination port, this dedicated port can be a mirrored port. This object will return a '0' if the MirrorState is not enabled. For the AT-81XX only the receive activity can be mirrored. ")
# MAC-address security mode and the action taken on violation.
atswitchSecurityConfig = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("enabled-with-learning-locked", 2), ("limited-enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchSecurityConfig.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchSecurityConfig.setDescription(' This Object is suppported only in 37XX product Line. Security feature configuration Object. The Security disable would let the device carry on the learn-new-address-as-it-comes-in mode as usual. When security is enabled-with-learning-locked, the learning of new address is stopped and the addresses locked in the device is used as the security Database. If an address comes in which is not present in the Device Security Database, then any of the atswitchSecurityAction Configured is triggered. When limited-enabled is selected, a per-port atswitchPortSecurityNumberOfAddresses specify the max number of MACs to be learned . ')
atswitchSecurityAction = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("send-trap-only", 1), ("disable-port-only", 2), ("disable-port-and-send-trap", 3), ("do-nothing", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchSecurityAction.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchSecurityAction.setDescription(' Security Action to be carried when the atswitchSecurityConfig is enabled-with-learning-locked or limted-enabled. ')
atswitchDebugAvailableBytes = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchDebugAvailableBytes.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchDebugAvailableBytes.setDescription(' This is strictly for Debug reason and this object should not be beleived as the correct number ')
atswitchTrunkConfig = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchTrunkConfig.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchTrunkConfig.setDescription(' This Object Configures the Trunking for the Uplink Ports only. This feature is currently available only in Fenix 37XX-XL hardware. ')
# --- Per-port configuration table (atswitchPortConfigGroup, ...8.10.3.1) ---
# One row per switch port, indexed by atswitchPortNumber; columns cover
# naming, duplex/speed, STP membership, switching mode, and security limits.
atswitchPortTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1), )
if mibBuilder.loadTexts: atswitchPortTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortTable.setDescription('Table of basic port configuration information.')
atswitchPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchPortNumber"))
if mibBuilder.loadTexts: atswitchPortEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortEntry.setDescription('An entry in the port config table.')
atswitchPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchPortNumber.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortNumber.setDescription('This object identifies the port of the switch.')
atswitchPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortName.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortName.setDescription('This attribute associates a user defined string name with the port.')
atswitchPortAutosenseOrHalfDuplex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("portAutoSense", 1), ("forceHalfDuplex", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortAutosenseOrHalfDuplex.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortAutosenseOrHalfDuplex.setDescription('This attribute allows an administrative request to configure whether this port can Autosense or Force the Half Duplex ')
atswitchPortLinkState = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("online", 1), ("offline", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchPortLinkState.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortLinkState.setDescription('This attribute allows an administrative request to read the status of link state on this port.')
atswitchPortDuplexStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fullDuplex", 1), ("halfDuplex", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchPortDuplexStatus.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortDuplexStatus.setDescription('This attribute allows an administrative request to read the status of Duplex on this port.')
atswitchPortSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tenMBits", 1), ("hundredMBits", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortSpeed.setDescription(' This attribute allows an administrative request to read/write the speed of this port. This attribure is read-only for all the Ports on AT-37XX product . This Attribute is read-write for ATS21 (AT81XX) product Only. This attribute allows an administrative request to change the speed on ATS21 product.')
atswitchPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2), ("blocking", 3), ("listening", 4), ("learning", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortState.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortState.setDescription('This attribute allows an administrative request to disable or enable communications on this port.It also responds with the status of the port .Except enabled(1) and disabled(2), all values are read-only status. ')
atswitchPortTransmitPacingConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortTransmitPacingConfig.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortTransmitPacingConfig.setDescription('This Object is supported on AT36XX product line Only. This attribute allows the transmit Pacing to be enabled or disabled. ')
# Spanning-tree columns (enable/disable, bridge membership, cost, priority).
atswitchPortSTPConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortSTPConfig.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortSTPConfig.setDescription('This attribute allows a bridging Mode to be configured with either Spanning Tree enabled or disabled. When Spanning tree is enabled, make sure that this port is belonging to a valid Bridge_id. Spanning Tree is enabled only when a valid Bridge_id is set. ')
atswitchPortBridgeid = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortBridgeid.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortBridgeid.setDescription(' The Bridge to which this port belongs to . ')
atswitchPortSTPCost = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortSTPCost.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortSTPCost.setDescription(' The Cost of the Spanning Tree Protocol.This object is valid only when STP is enabled. ')
atswitchPortSTPPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortSTPPriority.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortSTPPriority.setDescription(' The Priority of the spanning Tree Protocol. This object is valid when STP is enabled. ')
atswitchPortSwitchingType = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fast-cut-through", 1), ("store-and-forward", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortSwitchingType.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortSwitchingType.setDescription('This per-port attribute describes whether the port identified by atswitchPortNumber uses store-and-forward bridging for local switched Ethernet packets or cut-through. The Hardware allows cut-through on Transmission and cut-through-on Receive and the same for the store-and-forward. But some of the Config option does not make any sense. So, for now there is no distiction of Rx and Tx. Both Tx/Rx support store-and-forward or cut-through. This Object is not applicable for 37XX with XL hardware as the Hardware does not support both Cut through and Store and Forward. It is default Store and Forward. Even though the Set on either of them is done, Hardware does not do the Cut through Switching. ')
# Deprecated in the source MIB: per-port flow control (AT-81XX only).
atswitchPortFlowControlEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortFlowControlEnable.setStatus('deprecated')
if mibBuilder.loadTexts: atswitchPortFlowControlEnable.setDescription(' This Per_port attribute is only for ATS21 product.(81XX). This per-port attribute describes whether the port identified has flow Control Enabled or not. Flow Control on Full Duplex and Half Duplex is detected and Automatically, flow control accordingly is taken care of. BY Default, Flow Control is Disabled. ')
atswitchPortSecurityNumberOfAddresses = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 3, 1, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPortSecurityNumberOfAddresses.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortSecurityNumberOfAddresses.setDescription(' This object configures the Max number of addresses at an instance to be learnt by the device. If the learnt addresses number is above this number, then the atswitchSecurityAction is triggered . when the number of learnt addresses exceeds this number ,the the atswitchSecurityAction is triggered only if atswitchSecurityConfig is set to limited-enalbled. ELse nothing is triggered. ')
# --- VLAN configuration (atswitchVlanConfigGroup, ...8.10.4) ---
# Two tables: atswitchBasicVlanTable (VLAN definitions keyed by VLAN index)
# and atswitchPort2VlanTable (port-to-VLAN membership keyed by port number).
atswitchBasicVlanTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1), )
if mibBuilder.loadTexts: atswitchBasicVlanTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBasicVlanTable.setDescription('Table of Virtual LAN configured.')
atswitchBasicVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBeVlanIndex"))
if mibBuilder.loadTexts: atswitchBasicVlanEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBasicVlanEntry.setDescription('An entry in the table, containing VLAN information.')
atswitchBeVlanIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBeVlanIndex.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBeVlanIndex.setDescription('This object identifies the VLAN.')
atswitchBeVlanName = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBeVlanName.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBeVlanName.setDescription("This attribute associates a user defined string with the Virtual LAN. To configure a new VLAN, do 'set' operation on this object with the VLAN name. To delete an VLAN, do 'set' operation with string '*'. Before configuring any of the objects in this row , set the atswitchVlanStatus to 'under-construction' and once configured with all the information, please set the same object to operational ")
atswitchBeVlanTagId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBeVlanTagId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBeVlanTagId.setDescription(" This object is supported by 37xx series of fenix with the VLAN tagging capability hardware only. Please check whether the SysObjectId is greater than 29. This object configures the VId in the Tag Information header in accordance with 802.1q spec. Before configuring any of the objects in this row , set the atswitchVlanStatus to 'under-construction' and once configured with all the information, please set the same object to operational ")
atswitchBeVlanPortMask = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBeVlanPortMask.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBeVlanPortMask.setDescription(" This object is supported by 37xx series of fenix with the VLAN tagging capability hardware only. Please check whether the SysObjectId is greater than 29. This Object builds the Output Ports that participate in the Vlan with the atswitchBeVlanName. The Format of the input string would be like '1,2,5,7,12'. Before configuring any of the objects in this row , set the atswitchVlanStatus to 'under-construction' and once configured with all the information, please set the same object to operational ")
atswitchBeVlanRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("idle", 1), ("operational", 2), ("under-construction", 3), ("not-operational", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBeVlanRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBeVlanRowStatus.setDescription(" This object is supported by 37xx series of fenix with the VLAN tagging capability hardware only. Please check whether the SysObjectId is greater than 29. This object identifies the atswitchBeVLANEntry Row Status . The User sets this object to under-construction, to start with to create a row. Once the User Configures the VlanName and TagId and also the Output Ports.This object should be set to operational. If the user does not set this object to operational, the whole row is lost .when a 'get' of this row is done, this uncommited row is not seen. configured in the switch. ")
# Port-to-VLAN membership table.
atswitchPort2VlanTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 2), )
if mibBuilder.loadTexts: atswitchPort2VlanTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPort2VlanTable.setDescription('Table of per port Virtual LAN configuration.')
atswitchPort2VlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 2, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchPvPortNumber"))
if mibBuilder.loadTexts: atswitchPort2VlanEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPort2VlanEntry.setDescription('An entry in the table, containing per port VLAN information.')
atswitchPvPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchPvPortNumber.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPvPortNumber.setDescription('This object identifies the port on the switch.')
atswitchPvVlanName = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 4, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchPvVlanName.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPvVlanName.setDescription('This attribute associates a user defined string with the Virtual LAN. This Object is the same as atswitchBeVlanName. Please make sure to give the same string as atswitchBeVlanName.')
# --- Device-wide Ethernet statistics (atswitchEthernetStatsGroup, ...8.10.5) ---
# Two sub-branches: monitoring counters (5.1) and error counters (5.2).
atswitchEthMonStats = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 1))
atswitchEthErrorStats = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 2))
atswitchEthMonRxGoodFrames = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthMonRxGoodFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthMonRxGoodFrames.setDescription(' The total number of Good Frames received on this module. ')
atswitchEthMonTxGoodFrames = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthMonTxGoodFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthMonTxGoodFrames.setDescription(' The total number of Good Frames Transmitted by this module. ')
atswitchEthMonTxTotalBytes = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthMonTxTotalBytes.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthMonTxTotalBytes.setDescription(' The total number of Bytes transmitted from this module. ')
atswitchEthMonTxDeferred = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthMonTxDeferred.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthMonTxDeferred.setDescription(' This is the count of first time Transmission attempt which failed on an interface due to medium being busy. ')
atswitchEthMonTxCollisions = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthMonTxCollisions.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthMonTxCollisions.setDescription(' The total number of collisions while switching on an interface. ')
# Error counters (5.2.x).  Note: sub-id 5.2.5 is absent in the generated
# module; the last counter is registered at 5.2.6.
atswitchEthErrorCRC = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthErrorCRC.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthErrorCRC.setDescription(' The total number of CRC errors on received packets. ')
atswitchEthErrorAlignment = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthErrorAlignment.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthErrorAlignment.setDescription(' The total number of packets received that has alignment errors ')
atswitchEthErrorRxBadFrames = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 2, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthErrorRxBadFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthErrorRxBadFrames.setDescription(' The counter is incremented when a bad frame was received. ')
atswitchEthErrorLateCollisions = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 2, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthErrorLateCollisions.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthErrorLateCollisions.setDescription(' This object counts the number of times the collison was detected in the port. ')
atswitchEthErrorTxTotal = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 5, 2, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthErrorTxTotal.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthErrorTxTotal.setDescription(' Total number of error resulted from transfer operations. ')
# --- Per-port Ethernet monitor statistics (subtree ...8.10.6.1) ---
# Table indexed by atswitchEthPortMonId (1-based port number); all columns
# are read-only.  Generated bindings — keep OID tuples byte-exact.
atswitchEthPortMonStats = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1))
atswitchEthPortError = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 2))
atswitchEthPortMonTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1), )
if mibBuilder.loadTexts: atswitchEthPortMonTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortMonTable.setDescription(' A list of statistics entries.')
atswitchEthPortMonEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchEthPortMonId"))
if mibBuilder.loadTexts: atswitchEthPortMonEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortMonEntry.setDescription(' A collection of statistics kept for a particular port. ')
# Column 1: table index (port number, starting at 1).
atswitchEthPortMonId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthPortMonId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortMonId.setDescription(' The relative position of a port within a switch starting with 1. ')
atswitchEthPortMonTxTotalBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthPortMonTxTotalBytes.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortMonTxTotalBytes.setDescription(' The total number of Bytes transmited to this port. ')
atswitchRxGoodFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchRxGoodFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchRxGoodFrames.setDescription(' The total number of good packets(including unicast,broadcast packets and multicast packets) received ')
atswitchTxGoodFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchTxGoodFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchTxGoodFrames.setDescription(' The total number of good packets(including bad packets, broadcast packets and multicast packets) transmitted successfully ')
atswitchTxBroadcastFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchTxBroadcastFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchTxBroadcastFrames.setDescription(' The total number of packets transmitted that were directed to the broadcast address. This does not include multicast packets ')
atswitchTxMulticastFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchTxMulticastFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchTxMulticastFrames.setDescription(' The total number of packets transmitted that were directed to a multicast address. This does not include packets addressed to the broadcast address ')
atswitchAddrDuplicate = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchAddrDuplicate.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchAddrDuplicate.setDescription(' The number of address duplications seen on this port which is secured on another port. If a source address is locked for a port and if that address is eeen as source address on this port, then such occurances of duplicate Address is noted. ')
atswitchAddrMismatches = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchAddrMismatches.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchAddrMismatches.setDescription(' Address mismatches /address changes the sum of : 1. The number of mismatches seen on a port between a securely assigned port address and the source address observed on the port. Occurrence of this causes the TNETX3150 to suspend the port. 2. The number of times the TNETX3150 is required to assign or learn an address for a port. ')
atswitchRxOverruns = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 1, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchRxOverruns.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchRxOverruns.setDescription(' The number of frames lost due to lack of resources during frame reception. The counter is incremented when frame data cannot enter the RX fifo for whatever reason. Frames that overrun after entering the fifo also can be counted as RX discards if they are not cut-through. ')
# --- Per-port Ethernet error statistics (subtree ...8.10.6.2) ---
# Table indexed by atswitchEthPortErrorId (1-based port number); read-only
# Counter32 columns.  Generated bindings — keep OID tuples byte-exact.
atswitchEthPortErrorTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 2, 1), )
if mibBuilder.loadTexts: atswitchEthPortErrorTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortErrorTable.setDescription(' A list of statistics entries.')
atswitchEthPortErrorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 2, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchEthPortErrorId"))
if mibBuilder.loadTexts: atswitchEthPortErrorEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortErrorEntry.setDescription(' A collection of statistics kept for a particular port. ')
# Column 1: table index (port number, starting at 1).
atswitchEthPortErrorId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthPortErrorId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortErrorId.setDescription(' The relative position of a port within a switch starting with 1. ')
atswitchEthPortErrorRxBadFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 2, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthPortErrorRxBadFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortErrorRxBadFrames.setDescription(' The counter is incremented when a bad frame was received on this port. ')
atswitchEthPortErrorTxTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 6, 2, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchEthPortErrorTxTotal.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchEthPortErrorTxTotal.setDescription(' Total number of error resulted from transmit operations on this port. ')
# --- Forwarding database / VLAN membership table (subtree ...8.10.7.1) ---
# Maps learned MAC addresses (the index) to their VLAN, age, status and
# learned port.  Generated bindings — keep OID tuples byte-exact.
atswitchFwdVlanTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1), )
if mibBuilder.loadTexts: atswitchFwdVlanTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanTable.setDescription(' This tables lists Mac Addresses lexicographically from the forwarding table and also the information about the Vlan/Vlan which this MAC address belongs to. ')
atswitchFwdVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchFwdVlanMACAddr"))
if mibBuilder.loadTexts: atswitchFwdVlanEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanEntry.setDescription(' Entry in the atswitchFwdVlanTable. ')
# Column 1: table index (learned MAC address).
atswitchFwdVlanMACAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchFwdVlanMACAddr.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanMACAddr.setDescription('This object identifies the MAC Address associated with this entry.')
atswitchFwdVlanVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchFwdVlanVlanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanVlanId.setDescription('This object identifies the Vlan/Vlan which MAC Address defined by the atswitchFwdMACAddr is associated with. ')
atswitchFwdVlanAge = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchFwdVlanAge.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanAge.setDescription('Current Age of the MACAddr in the Forwarding Table.It Starts with 0 and if not heard it increments to some value until it hears from it and if not heard before reaching Max value, it will purge this entry. ')
# Enumerated status column: inactive(1) / active(2) / other(3).
atswitchFwdVlanStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2), ("other", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchFwdVlanStatus.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanStatus.setDescription('The status of this entry. The meanings of the values are: inactive(1) : this entry is not longer valid (e.g., it was learned but has since aged-out), but has not yet been flushed from the table. active(2) : the value of the corresponding instance of atswitchFwdVlanPort was active, and is being used. other(3) : none of the following. ')
atswitchFwdVlanPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 7, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchFwdVlanPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchFwdVlanPort.setDescription("Either the value '0', or the port number of the port on which a frame having a source address equal to the value of the corresponding instance of atswitchFwdVlanMACAddr has been seen. A value of '0' indicates that the port number has not been learned but that the bridge does have some forwarding/filtering information about this address. Implementors are encouraged to assign the port value to this object whenever it is learned even for addresses for which the corresponding value of atswitchFwdVlanStatus is not learned(3). ")
# --- Bridge base information, per (V)LAN (subtree ...8.10.9.1.1) ---
# Mirrors RFC 1493 dot1dBase objects but indexed by a LAN/VLAN id so the
# switch can report one logical bridge per VLAN.  Generated bindings —
# keep OID tuples byte-exact.
atswitchBrBaseTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 1), )
if mibBuilder.loadTexts: atswitchBrBaseTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBaseTable.setDescription('Table of basic bridge information.')
atswitchBrBaseEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrBaseLanId"))
if mibBuilder.loadTexts: atswitchBrBaseEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBaseEntry.setDescription('An entry in the atswitchBrBaseTable.')
# Column 1: table index (LAN / VLAN identifier).
atswitchBrBaseLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBaseLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBaseLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
atswitchBrBaseBridgeAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 1, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBaseBridgeAddress.setReference('IEEE 802.1D-1990: Sections 6.4.1.1.3 and 3.12.5')
if mibBuilder.loadTexts: atswitchBrBaseBridgeAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBaseBridgeAddress.setDescription('The MAC address used by this bridge when it must be referred to in a unique fashion. It is recommended that this be the numerically smallest MAC address of all ports that belong to this bridge. However it is only required to be unique. When concatenated with atswitchBrStpPriority a unique BridgeIdentifier is formed which is used in the Spanning Tree Protocol.')
atswitchBrBaseNumPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBaseNumPorts.setReference('IEEE 802.1D-1990: Section 6.4.1.1.3')
if mibBuilder.loadTexts: atswitchBrBaseNumPorts.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBaseNumPorts.setDescription('The number of ports controlled by this bridging entity.')
atswitchBrBaseType = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("transparent-only", 2), ("sourceroute-only", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBaseType.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBaseType.setDescription('Indicates what type of bridging this bridge can perform. If a bridge is actually performing a certain type of bridging this will be indicated by entries in the port table for the given type.')
# --- Bridge base port table (subtree ...8.10.9.1.4) ---
# Per-port analogue of RFC 1493 dot1dBasePortTable, doubly indexed by
# (LAN/VLAN id, port number).  Generated bindings — keep OID tuples byte-exact.
atswitchBrBasePortTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4), )
if mibBuilder.loadTexts: atswitchBrBasePortTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortTable.setDescription('A table that contains generic information about every port that is associated with this bridge. Transparent, source-route, and srt ports are included.')
atswitchBrBasePortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrBasePortLanId"), (0, "ATSWTCH2-MIB", "atswitchBrBasePort"))
if mibBuilder.loadTexts: atswitchBrBasePortEntry.setReference('IEEE 802.1D-1990: Section 6.4.2, 6.6.1')
if mibBuilder.loadTexts: atswitchBrBasePortEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortEntry.setDescription('A list of information for each port of the bridge.')
# Index columns 1-2: (LAN/VLAN id, port number 1..65535).
atswitchBrBasePortLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBasePortLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
atswitchBrBasePort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBasePort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePort.setDescription('The port number of the port for which this entry contains bridge management information.')
atswitchBrBasePortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBasePortIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortIfIndex.setDescription('The value of the instance of the ifIndex object, defined in MIB-II, for the interface corresponding to this port.')
atswitchBrBasePortCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1, 4), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBasePortCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortCircuit.setDescription('For a port which (potentially) has the same value of atswitchBrBasePortIfIndex as another port on the same bridge, this object contains the name of an object instance unique to this port. For example, in the case where multiple ports correspond one- to-one with multiple X.25 virtual circuits, this value might identify an (e.g., the first) object instance associated with the X.25 virtual circuit corresponding to this port. For a port which has a unique value of atswitchBrBasePortIfIndex, this object can have the value { 0 0 }.')
atswitchBrBasePortDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBasePortDelayExceededDiscards.setReference('IEEE 802.1D-1990: Section 6.6.1.1.3')
if mibBuilder.loadTexts: atswitchBrBasePortDelayExceededDiscards.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortDelayExceededDiscards.setDescription('The number of frames discarded by this port due to excessive transit delay through the bridge. It is incremented by both transparent and source route bridges.')
atswitchBrBasePortMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 1, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrBasePortMtuExceededDiscards.setReference('IEEE 802.1D-1990: Section 6.6.1.1.3')
if mibBuilder.loadTexts: atswitchBrBasePortMtuExceededDiscards.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrBasePortMtuExceededDiscards.setDescription('The number of frames discarded by this port due to an excessive size. It is incremented by both transparent and source route bridges.')
# --- Spanning Tree Protocol state, per (V)LAN (subtree ...8.10.9.2.1) ---
# Mirrors RFC 1493 dot1dStp objects indexed by LAN/VLAN id.  Timer values
# are in hundredths of a second (the Timeout TC).  Only the Bridge* columns
# (13-15) and atswitchBrStpPriority are writable.  Generated bindings —
# keep OID tuples and constraint ranges byte-exact.
atswitchBrStpTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1), )
if mibBuilder.loadTexts: atswitchBrStpTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpTable.setDescription('Table of bridge spanning tree information.')
atswitchBrStpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrStpLanId"))
if mibBuilder.loadTexts: atswitchBrStpEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpEntry.setDescription('An entry in the atswitchBrStpTable.')
# Column 1: table index (LAN / VLAN identifier).
atswitchBrStpLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
atswitchBrStpProtocolSpecification = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("decLb100", 2), ("ieee8021d", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpProtocolSpecification.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpProtocolSpecification.setDescription("An indication of what version of the Spanning Tree Protocol is being run. The value 'decLb100(2)' indicates the DEC LANbridge 100 Spanning Tree protocol. IEEE 802.1d implementations will return 'ieee8021d(3)'. If future versions of the IEEE Spanning Tree Protocol are released that are incompatible with the current version a new value will be defined.")
# Writable: upper two octets of the 8-octet Bridge ID.
atswitchBrStpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpPriority.setReference('IEEE 802.1D-1990: Section 4.5.3.7')
if mibBuilder.loadTexts: atswitchBrStpPriority.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPriority.setDescription('The value of the write-able portion of the Bridge ID, i.e., the first two octets of the (8 octet long) Bridge ID. The other (last) 6 octets of the Bridge ID are given by the value of atswitchBrBaseBridgeAddress.')
atswitchBrStpTimeSinceTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpTimeSinceTopologyChange.setReference('IEEE 802.1D-1990: Section 6.8.1.1.3')
if mibBuilder.loadTexts: atswitchBrStpTimeSinceTopologyChange.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpTimeSinceTopologyChange.setDescription('The time (in hundredths of a second) since the last time a topology change was detected by the bridge entity.')
atswitchBrStpTopChanges = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpTopChanges.setReference('IEEE 802.1D-1990: Section 6.8.1.1.3')
if mibBuilder.loadTexts: atswitchBrStpTopChanges.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpTopChanges.setDescription('The total number of topology changes detected by this bridge since the management entity was last reset or initialized.')
atswitchBrStpDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 6), BridgeId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpDesignatedRoot.setReference('IEEE 802.1D-1990: Section 4.5.3.1')
if mibBuilder.loadTexts: atswitchBrStpDesignatedRoot.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpDesignatedRoot.setDescription('The bridge identifier of the root of the spanning tree as determined by the Spanning Tree Protocol as executed by this node. This value is used as the Root Identifier parameter in all Configuration Bridge PDUs originated by this node.')
atswitchBrStpRootCost = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpRootCost.setReference('IEEE 802.1D-1990: Section 4.5.3.2')
if mibBuilder.loadTexts: atswitchBrStpRootCost.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpRootCost.setDescription('The cost of the path to the root as seen from this bridge.')
atswitchBrStpRootPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpRootPort.setReference('IEEE 802.1D-1990: Section 4.5.3.3')
if mibBuilder.loadTexts: atswitchBrStpRootPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpRootPort.setDescription('The port number of the port which offers the lowest cost path from this bridge to the root bridge.')
atswitchBrStpMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 9), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpMaxAge.setReference('IEEE 802.1D-1990: Section 4.5.3.4')
if mibBuilder.loadTexts: atswitchBrStpMaxAge.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpMaxAge.setDescription('The maximum age of Spanning Tree Protocol information learned from the network on any port before it is discarded, in units of hundredths of a second. This is the actual value that this bridge is currently using.')
atswitchBrStpHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 10), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpHelloTime.setReference('IEEE 802.1D-1990: Section 4.5.3.5')
if mibBuilder.loadTexts: atswitchBrStpHelloTime.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpHelloTime.setDescription('The amount of time between the transmission of Configuration bridge PDUs by this node on any port when it is the root of the spanning tree or trying to become so, in units of hundredths of a second. This is the actual value that this bridge is currently using.')
atswitchBrStpHoldTime = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpHoldTime.setReference('IEEE 802.1D-1990: Section 4.5.3.14')
if mibBuilder.loadTexts: atswitchBrStpHoldTime.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpHoldTime.setDescription('This time value determines the interval length during which no more than two Configuration bridge PDUs shall be transmitted by this node, in units of hundredths of a second.')
atswitchBrStpForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 12), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpForwardDelay.setReference('IEEE 802.1D-1990: Section 4.5.3.6')
if mibBuilder.loadTexts: atswitchBrStpForwardDelay.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpForwardDelay.setDescription('This time value, measured in units of hundredths of a second, controls how fast a port changes its spanning state when moving towards the Forwarding state. The value determines how long the port stays in each of the Listening and Learning states, which precede the Forwarding state. This value is also used, when a topology change has been detected and is underway, to age all dynamic entries in the Forwarding Database. [Note that this value is the one that this bridge is currently using, in contrast to atswitchBrStpBridgeForwardDelay which is the value that this bridge and all others would start using if/when this bridge were to become the root.]')
# Writable root-candidate timer values (ranges in hundredths of a second,
# per 802.1D constraints).
atswitchBrStpBridgeMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 13), Timeout().subtype(subtypeSpec=ValueRangeConstraint(600, 4000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpBridgeMaxAge.setReference('IEEE 802.1D-1990: Section 4.5.3.8')
if mibBuilder.loadTexts: atswitchBrStpBridgeMaxAge.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpBridgeMaxAge.setDescription('The value that all bridges use for MaxAge when this bridge is acting as the root. Note that 802.1D-1990 specifies that the range for this parameter is related to the value of atswitchBrStpBridgeHelloTime. The granularity of this timer is specified by 802.1D-1990 to be 1 second. An agent may return a badValue error if a set is attempted to a value which is not a whole number of seconds.')
atswitchBrStpBridgeHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 14), Timeout().subtype(subtypeSpec=ValueRangeConstraint(100, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpBridgeHelloTime.setReference('IEEE 802.1D-1990: Section 4.5.3.9')
if mibBuilder.loadTexts: atswitchBrStpBridgeHelloTime.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpBridgeHelloTime.setDescription('The value that all bridges use for HelloTime when this bridge is acting as the root. The granularity of this timer is specified by 802.1D- 1990 to be 1 second. An agent may return a badValue error if a set is attempted to a value which is not a whole number of seconds.')
atswitchBrStpBridgeForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 1, 1, 15), Timeout().subtype(subtypeSpec=ValueRangeConstraint(400, 3000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpBridgeForwardDelay.setReference('IEEE 802.1D-1990: Section 4.5.3.10')
if mibBuilder.loadTexts: atswitchBrStpBridgeForwardDelay.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpBridgeForwardDelay.setDescription('The value that all bridges use for ForwardDelay when this bridge is acting as the root. Note that 802.1D-1990 specifies that the range for this parameter is related to the value of atswitchBrStpBridgeMaxAge. The granularity of this timer is specified by 802.1D-1990 to be 1 second. An agent may return a badValue error if a set is attempted to a value which is not a whole number of seconds.')
# --- Per-port Spanning Tree Protocol state (subtree ...8.10.9.2.15) ---
# Mirrors RFC 1493 dot1dStpPortTable, doubly indexed by (LAN/VLAN id,
# port number).  Priority, enable and path-cost columns are writable.
# Generated bindings — keep OID tuples and constraint ranges byte-exact.
atswitchBrStpPortTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15), )
if mibBuilder.loadTexts: atswitchBrStpPortTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortTable.setDescription('A table that contains port-specific information for the Spanning Tree Protocol.')
atswitchBrStpPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrStpPortLanId"), (0, "ATSWTCH2-MIB", "atswitchBrStpPort"))
if mibBuilder.loadTexts: atswitchBrStpPortEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortEntry.setDescription('A list of information maintained by every port about the Spanning Tree Protocol state for that port.')
# Index columns 1-2: (LAN/VLAN id, port number 1..65535).
atswitchBrStpPortLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
atswitchBrStpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPort.setReference('IEEE 802.1D-1990: Section 6.8.2.1.2')
if mibBuilder.loadTexts: atswitchBrStpPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPort.setDescription('The port number of the port for which this entry contains Spanning Tree Protocol management information.')
atswitchBrStpPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpPortPriority.setReference('IEEE 802.1D-1990: Section 4.5.5.1')
if mibBuilder.loadTexts: atswitchBrStpPortPriority.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortPriority.setDescription('The value of the priority field which is contained in the first (in network byte order) octet of the (2 octet long) Port ID. The other octet of the Port ID is given by the value of atswitchBrStpPort.')
# STP port state enumeration per 802.1D plus vendor broken(6).
atswitchBrStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortState.setReference('IEEE 802.1D-1990: Section 4.5.5.2')
if mibBuilder.loadTexts: atswitchBrStpPortState.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortState.setDescription("The port's current state as defined by application of the Spanning Tree Protocol. This state controls what action a port takes on reception of a frame. If the bridge has detected a port that is malfunctioning it will place that port into the broken(6) state. For ports which are disabled (see atswitchBrStpPortEnable), this object will have a value of disabled(1).")
atswitchBrStpPortEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpPortEnable.setReference('IEEE 802.1D-1990: Section 4.5.5.2')
if mibBuilder.loadTexts: atswitchBrStpPortEnable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortEnable.setDescription('The enabled/disabled status of the port.')
atswitchBrStpPortPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrStpPortPathCost.setReference('IEEE 802.1D-1990: Section 4.5.5.3')
if mibBuilder.loadTexts: atswitchBrStpPortPathCost.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortPathCost.setDescription('The contribution of this port to the path cost of paths towards the spanning tree root which include this port. 802.1D-1990 recommends that the default value of this parameter be in inverse proportion to the speed of the attached LAN.')
atswitchBrStpPortDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 7), BridgeId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedRoot.setReference('IEEE 802.1D-1990: Section 4.5.5.4')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedRoot.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedRoot.setDescription('The unique Bridge Identifier of the Bridge recorded as the Root in the Configuration BPDUs transmitted by the Designated Bridge for the segment to which the port is attached.')
atswitchBrStpPortDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedCost.setReference('IEEE 802.1D-1990: Section 4.5.5.5')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedCost.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedCost.setDescription('The path cost of the Designated Port of the segment connected to this port. This value is compared to the Root Path Cost field in received bridge PDUs.')
atswitchBrStpPortDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 9), BridgeId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedBridge.setReference('IEEE 802.1D-1990: Section 4.5.5.6')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedBridge.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedBridge.setDescription("The Bridge Identifier of the bridge which this port considers to be the Designated Bridge for this port's segment.")
# Fixed-length 2-octet Port Identifier string.
atswitchBrStpPortDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedPort.setReference('IEEE 802.1D-1990: Section 4.5.5.7')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortDesignatedPort.setDescription("The Port Identifier of the port on the Designated Bridge for this port's segment.")
atswitchBrStpPortForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 2, 15, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrStpPortForwardTransitions.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrStpPortForwardTransitions.setDescription('The number of times this port has transitioned from the Learning state to the Forwarding state.')
# ---------------------------------------------------------------------------
# atswitchBrTpTable: transparent bridging information, one row per
# (virtual) LAN, indexed by atswitchBrTpLanId.
# ---------------------------------------------------------------------------
atswitchBrTpTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 1), )
if mibBuilder.loadTexts: atswitchBrTpTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpTable.setDescription('Table of transparent bridging information.')
atswitchBrTpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrTpLanId"))
if mibBuilder.loadTexts: atswitchBrTpEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpEntry.setDescription('An entry in the atswitchBrTpTable.')
# Index column: the LAN / VLAN identifier for this row.
atswitchBrTpLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
# Learnt entries dropped because the Forwarding Database was out of space.
atswitchBrTpLearnedEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpLearnedEntryDiscards.setReference('IEEE 802.1D-1990: Section 6.7.1.1.3')
if mibBuilder.loadTexts: atswitchBrTpLearnedEntryDiscards.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpLearnedEntryDiscards.setDescription('The total number of Forwarding Database entries, which have been or would have been learnt, but have been discarded due to a lack of space to store them in the Forwarding Database. If this counter is increasing, it indicates that the Forwarding Database is regularly becoming full (a condition which has unpleasant performance effects on the subnetwork). If this counter has a significant value but is not presently increasing, it indicates that the problem has been occurring but is not persistent.')
# Writable ageing timeout (seconds) for dynamically learned addresses.
atswitchBrTpAgingTime = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchBrTpAgingTime.setReference('IEEE 802.1D-1990: Section 6.7.1.1.3')
if mibBuilder.loadTexts: atswitchBrTpAgingTime.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpAgingTime.setDescription('The timeout period in seconds for aging out dynamically learned forwarding information. 802.1D-1990 recommends a default of 300 seconds.')
# ---------------------------------------------------------------------------
# atswitchBrTpFdbTable: forwarding database of learned unicast MAC
# addresses, indexed by (LAN id, MAC address).
# ---------------------------------------------------------------------------
atswitchBrTpFdbTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 3), )
if mibBuilder.loadTexts: atswitchBrTpFdbTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpFdbTable.setDescription('A table that contains information about unicast entries for which the bridge has forwarding and/or filtering information. This information is used by the transparent bridging function in determining how to propagate a received frame.')
atswitchBrTpFdbEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 3, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrTpFdbLanId"), (0, "ATSWTCH2-MIB", "atswitchBrTpFdbAddress"))
if mibBuilder.loadTexts: atswitchBrTpFdbEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpFdbEntry.setDescription('Information about a specific unicast MAC address for which the bridge has some forwarding and/or filtering information.')
# Index column 1: the LAN / VLAN identifier.
atswitchBrTpFdbLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpFdbLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpFdbLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
# Index column 2: the learned unicast MAC address.
atswitchBrTpFdbAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 3, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpFdbAddress.setReference('IEEE 802.1D-1990: Section 3.9.1, 3.9.2')
if mibBuilder.loadTexts: atswitchBrTpFdbAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpFdbAddress.setDescription('A unicast MAC address for which the bridge has forwarding and/or filtering information.')
# Port the address was seen on; 0 means the port has not been learned yet.
atswitchBrTpFdbPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpFdbPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpFdbPort.setDescription("Either the value '0', or the port number of the port on which a frame having a source address equal to the value of the corresponding instance of atswitchBrTpFdbAddress has been seen. A value of '0' indicates that the port number has not been learned but that the bridge does have some forwarding/filtering information about this address . Implementors are encouraged to assign the port value to this object whenever it is learned even for addresses for which the corresponding value of atswitchBrTpFdbStatus is not learned(3).")
# Entry state: inactive(1) aged-out but not flushed, active(2) in use, other(3).
atswitchBrTpFdbStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2), ("other", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpFdbStatus.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpFdbStatus.setDescription('The status of this entry. The meanings of the values are: inactive(1) : this entry is not longer valid (e.g., it was learned but has since aged-out), but has not yet been flushed from the table. active(2) : the value of the corresponding instance of atswitchBrTpFdbPort was active, and is being used. other(3) : none of the following. This would include the case where some other MIB object (not the corresponding instance of atswitchBrTpFdbPort ) is being used to determine if and how frames addressed to the value of the corresponding instance of atswitchBrTpFdbAddress are being forwarded. ')
# ---------------------------------------------------------------------------
# atswitchBrTpPortTable: per-port transparent bridging counters,
# indexed by (LAN id, port number).
# ---------------------------------------------------------------------------
atswitchBrTpPortTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4), )
if mibBuilder.loadTexts: atswitchBrTpPortTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortTable.setDescription('A table that contains information about every port that is associated with this transparent bridge.')
atswitchBrTpPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchBrTpPortLanId"), (0, "ATSWTCH2-MIB", "atswitchBrTpPort"))
if mibBuilder.loadTexts: atswitchBrTpPortEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortEntry.setDescription('A list of information for each port of a transparent bridge.')
# Index column 1: the LAN / VLAN identifier.
atswitchBrTpPortLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpPortLanId.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortLanId.setDescription('This object uniquely identifies the lan or Virtual lan.')
# Index column 2: the bridge port number.
atswitchBrTpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPort.setDescription('The port number of the port for which this entry contains Transparent bridging management information.')
# Largest INFO (non-MAC) field size this port can send or receive.
atswitchBrTpPortMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpPortMaxInfo.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortMaxInfo.setDescription('The maximum size of the INFO (non-MAC) field that this port will receive or transmit.')
# Frames received from the segment and processed by the bridging function.
atswitchBrTpPortInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpPortInFrames.setReference('IEEE 802.1D-1990: Section 6.6.1.1.3')
if mibBuilder.loadTexts: atswitchBrTpPortInFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortInFrames.setDescription('The number of frames that have been received by this port from its segment. Note that a frame received on the interface corresponding to this port is only counted by this object if and only if it is for a protocol being processed by the local bridging function, including bridge management frames.')
# Frames transmitted to the segment by the bridging function.
atswitchBrTpPortOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpPortOutFrames.setReference('IEEE 802.1D-1990: Section 6.6.1.1.3')
if mibBuilder.loadTexts: atswitchBrTpPortOutFrames.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortOutFrames.setDescription('The number of frames that have been transmitted by this port to its segment. Note that a frame transmitted on the interface corresponding to this port is only counted by this object if and only if it is for a protocol being processed by the local bridging function, including bridge management frames.')
# Valid frames received but filtered out by the Forwarding Process.
atswitchBrTpPortInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 9, 3, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchBrTpPortInDiscards.setReference('IEEE 802.1D-1990: Section 6.6.1.1.3')
if mibBuilder.loadTexts: atswitchBrTpPortInDiscards.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchBrTpPortInDiscards.setDescription('Count of valid frames received which were discarded (i.e., filtered) by the Forwarding Process.')
# ---------------------------------------------------------------------------
# atswitchStaticMACTable: statically configured MAC-to-port bindings,
# indexed by MAC address; ports and VLAN are writable.
# ---------------------------------------------------------------------------
atswitchStaticMACTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 10, 1), )
if mibBuilder.loadTexts: atswitchStaticMACTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchStaticMACTable.setDescription(' This tables lists Mac Addresses lexicographically from the Static table which binds MAC addresses to ports on the switch. ')
atswitchStaticMACEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 10, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchStaticMACAddress"))
if mibBuilder.loadTexts: atswitchStaticMACEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchStaticMACEntry.setDescription(' Entry in the atswitchStaticMACTable. ')
# Index column: the statically configured MAC address.
atswitchStaticMACAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 10, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchStaticMACAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchStaticMACAddress.setDescription('This object identifies the MAC Address associated with Static MAC Table.')
# Writable comma-separated port list (a DisplayString, e.g. "1,5,6,8,9").
atswitchStaticMACPortNumbers = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 10, 1, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchStaticMACPortNumbers.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchStaticMACPortNumbers.setDescription('This object identifies the Ports for which the above MAC address is associated Statically in Static MAC Table. This Object could take the port numbers as the Static Address added can be Multicast addresses. This Object is a DisplayString So, the input for example would be 1,5,6,8,9 ')
# Writable VLAN the static address is confined to; ports are moved if needed.
atswitchStaticMACVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 10, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atswitchStaticMACVlan.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchStaticMACVlan.setDescription('The Static MAC Address to be added can also be confined to a Vlan also. If the Port does not belong to that Vlan. the Port is forced to be moved to that Vlan. ')
# ---------------------------------------------------------------------------
# atswitchPortMACTable: unicast MAC addresses observed per port,
# indexed by (MAC address, port number); read-only.
# ---------------------------------------------------------------------------
atswitchPortMACTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 12, 1), )
if mibBuilder.loadTexts: atswitchPortMACTable.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortMACTable.setDescription('A table that contains information about unicast entries seen on ports. ')
atswitchPortMACEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 12, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchPortMACAddress"), (0, "ATSWTCH2-MIB", "atswitchPortMACPort"))
if mibBuilder.loadTexts: atswitchPortMACEntry.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortMACEntry.setDescription('Information about a specific unicast MAC address seen on ports. ')
# Index column 1: the observed unicast MAC address.
atswitchPortMACAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 12, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchPortMACAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortMACAddress.setDescription('A unicast MAC address seen on this port. ')
# Index column 2: the port on which the address was observed.
atswitchPortMACPort = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 12, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchPortMACPort.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchPortMACPort.setDescription('This object identifies the port on which the atswitchPortMACAddress was seen. ')
# ---------------------------------------------------------------------------
# atswitchDebugMallocLogTable: DEPRECATED debug-only table exposing the
# firmware's memory-leak log (malloc'd addresses never freed). The
# descriptions themselves warn the values are unreliable; kept only for
# backwards compatibility with older agents.
# ---------------------------------------------------------------------------
atswitchDebugMallocLogTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 10, 13, 1), )
if mibBuilder.loadTexts: atswitchDebugMallocLogTable.setStatus('deprecated')
if mibBuilder.loadTexts: atswitchDebugMallocLogTable.setDescription(' Dont care. ')
atswitchMallocLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 10, 13, 1, 1), ).setIndexNames((0, "ATSWTCH2-MIB", "atswitchDebugMallocLogIndex"))
if mibBuilder.loadTexts: atswitchMallocLogEntry.setStatus('deprecated')
if mibBuilder.loadTexts: atswitchMallocLogEntry.setDescription(' Entry in the atswitchDebugMallocLogEntry. ')
atswitchDebugMallocLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 13, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchDebugMallocLogIndex.setStatus('deprecated')
if mibBuilder.loadTexts: atswitchDebugMallocLogIndex.setDescription(' This is strictly for Debug reason and this object should not be beleived as the correct number. This log is useful for finding the memory leak which is stored in Leak_Table. The Entry of this table is the address where malloc is done and no free is done. The size of this 6000. ')
atswitchDebugMallocLogCaller = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 13, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchDebugMallocLogCaller.setStatus('deprecated')
if mibBuilder.loadTexts: atswitchDebugMallocLogCaller.setDescription(" This is strictly for Debug reason and this object should not be beleived as the correct number. This log is useful for finding the memory leak which is stored in Leak_Table. The Entry of this table is the caller address who malloc'd . The size of this 6000. ")
atswitchDebugMallocLogAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 10, 13, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atswitchDebugMallocLogAddress.setStatus('deprecated')
if mibBuilder.loadTexts: atswitchDebugMallocLogAddress.setDescription(' This is strictly for Debug reason and this object should not be beleived as the correct number. This log is useful for finding the memory leak which is stored in Leak_Table. The Entry of this table is the address where malloc is done and no free is done. The size of this 6000. ')
# ---------------------------------------------------------------------------
# Port-security scalars (subtree ...207.8.10.8): duplicate/intruder MAC
# details. NOTE(review): these MibScalars carry no setMaxAccess(), unlike
# the columns above — presumably intentional in the generator output.
# ---------------------------------------------------------------------------
# The secured MAC address that was seen duplicated (6 fixed octets).
atswitchDuplicateMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 8, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6))
if mibBuilder.loadTexts: atswitchDuplicateMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchDuplicateMacAddress.setDescription(' The Duplicate Mac address which is secured on atswitchSecurePortNumber. ')
# MAC address of the detected intruder (see intruderTrap below in the MIB).
atswitchIntruderMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 8, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6))
if mibBuilder.loadTexts: atswitchIntruderMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchIntruderMacAddress.setDescription(' The Mac address of the Intruder as seen on port which is the the next object ')
# Port whose secured address matches the duplicate address above.
atswitchSecuredPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 10, 8, 3), Integer32())
if mibBuilder.loadTexts: atswitchSecuredPortNumber.setStatus('mandatory')
if mibBuilder.loadTexts: atswitchSecuredPortNumber.setDescription(' This is the Port number which has the duplicate Address as the secured address . ')
# ---------------------------------------------------------------------------
# Enterprise-specific notifications under 1.3.6.1.4.1.207 with trap
# numbers 101 (newRoot), 102 (topologyChange) and 105 (intruderTrap).
# ---------------------------------------------------------------------------
newRoot = NotificationType((1, 3, 6, 1, 4, 1, 207) + (0,101))
if mibBuilder.loadTexts: newRoot.setDescription('The newRoot trap indicates that the sending agent has become the new root of the Spanning Tree; the trap is sent by a bridge soon after its election as the new root, e.g., upon expiration of the Topology Change Timer immediately subsequent to its election. Implementation of this trap is optional.')
topologyChange = NotificationType((1, 3, 6, 1, 4, 1, 207) + (0,102))
if mibBuilder.loadTexts: topologyChange.setDescription('A topologyChange trap is sent by a bridge when any of its configured ports transitions from the Learning state to the Forwarding state, or from the Forwarding state to the Blocking state. The trap is not sent if a newRoot trap is sent for the same transition. Implementation of this trap is optional.')
intruderTrap = NotificationType((1, 3, 6, 1, 4, 1, 207) + (0,105))
if mibBuilder.loadTexts: intruderTrap.setDescription('The intruderTrap trap indicates that there was a Intruder was detected on the port shown as ifIndex. ')
# Register every object defined by this module with the MIB builder under
# the "ATSWTCH2-MIB" module name (single generated call, one statement
# spread over several physical lines).
mibBuilder.exportSymbols("ATSWTCH2-MIB", atswitchBrBaseNumPorts=atswitchBrBaseNumPorts, atswitchDeviceFlowControl=atswitchDeviceFlowControl, atswitchBrBasePortLanId=atswitchBrBasePortLanId, atswitchPortSTPPriority=atswitchPortSTPPriority, atswitchEthMonRxGoodFrames=atswitchEthMonRxGoodFrames, atswitchBrStpTimeSinceTopologyChange=atswitchBrStpTimeSinceTopologyChange, atswitchBrStpPortDesignatedRoot=atswitchBrStpPortDesignatedRoot, atswitchFwdVlanMACAddr=atswitchFwdVlanMACAddr, atswitchPortMACTable=atswitchPortMACTable, atswitchBrTp=atswitchBrTp, atswitchDebugMallocLogCaller=atswitchDebugMallocLogCaller, atswitchBeVlanName=atswitchBeVlanName, atswitchFwdVlanEntry=atswitchFwdVlanEntry, atswitchBrStpBridgeForwardDelay=atswitchBrStpBridgeForwardDelay, atswitchAddrDuplicate=atswitchAddrDuplicate, atswitchBrStpPortDesignatedPort=atswitchBrStpPortDesignatedPort, atswitchDebugMallocLogTable=atswitchDebugMallocLogTable, atswitchFwdVlanPort=atswitchFwdVlanPort, intruderTrap=intruderTrap, atswitchFwdVlanStatus=atswitchFwdVlanStatus, atswitchBrStpProtocolSpecification=atswitchBrStpProtocolSpecification, atswitchPortSecurityNumberOfAddresses=atswitchPortSecurityNumberOfAddresses, atswitchNwMgrIndex=atswitchNwMgrIndex, mibObject=mibObject, atswitchNwMgrEntry=atswitchNwMgrEntry, atswitchBeVlanRowStatus=atswitchBeVlanRowStatus, atswitchBrTpFdbEntry=atswitchBrTpFdbEntry, atswitchEthPortErrorId=atswitchEthPortErrorId, atswitchCurrentIpAddress=atswitchCurrentIpAddress, atswitchStaticMACEntry=atswitchStaticMACEntry, atswitchPortSpeed=atswitchPortSpeed, atswitchBrTpFdbPort=atswitchBrTpFdbPort, atswitchNMGroup=atswitchNMGroup, atswitchBrStpHoldTime=atswitchBrStpHoldTime, atswitchBrTpFdbTable=atswitchBrTpFdbTable, atswitchEthErrorLateCollisions=atswitchEthErrorLateCollisions, atswitchBeVlanTagId=atswitchBeVlanTagId, atswitchBrStpPortLanId=atswitchBrStpPortLanId, atswitchBrStpBridgeHelloTime=atswitchBrStpBridgeHelloTime, atswitchSecuredPortNumber=atswitchSecuredPortNumber, 
atswitchDefaultDomainName=atswitchDefaultDomainName, atswitchBrBaseEntry=atswitchBrBaseEntry, atswitchBasicVlanEntry=atswitchBasicVlanEntry, atswitchEthErrorStats=atswitchEthErrorStats, atiProduct=atiProduct, BridgeId=BridgeId, atswitchEthErrorRxBadFrames=atswitchEthErrorRxBadFrames, MacAddress=MacAddress, atswitchPortState=atswitchPortState, atswitchMirroringDestinationPort=atswitchMirroringDestinationPort, atswitchRxGoodFrames=atswitchRxGoodFrames, atswitchBrBasePortDelayExceededDiscards=atswitchBrBasePortDelayExceededDiscards, atswitchBrTpPortMaxInfo=atswitchBrTpPortMaxInfo, atswitchPortMACEntry=atswitchPortMACEntry, topologyChange=topologyChange, atswitchBrBasePortTable=atswitchBrBasePortTable, atswitchMallocLogEntry=atswitchMallocLogEntry, atswitchBrStp=atswitchBrStp, atswitchDebugMallocLogIndex=atswitchDebugMallocLogIndex, atswitchStaticMACGroup=atswitchStaticMACGroup, atswitchPvPortNumber=atswitchPvPortNumber, atswitchPortMACAddress=atswitchPortMACAddress, atswitchTrapAttrGroup=atswitchTrapAttrGroup, atswitchBrBaseBridgeAddress=atswitchBrBaseBridgeAddress, atswitchPortDisableOnSecurityViolation=atswitchPortDisableOnSecurityViolation, atswitchBrStpRootCost=atswitchBrStpRootCost, atswitchBrTpPortOutFrames=atswitchBrTpPortOutFrames, atswitchAddrMismatches=atswitchAddrMismatches, atswitchBrStpRootPort=atswitchBrStpRootPort, atswitchBrBaseTable=atswitchBrBaseTable, atswitchPortEntry=atswitchPortEntry, atswitchBrStpTopChanges=atswitchBrStpTopChanges, atswitchBrTpPortInFrames=atswitchBrTpPortInFrames, atswitchDebugMallocLogGroup=atswitchDebugMallocLogGroup, atswitchProductType=atswitchProductType, alliedTelesyn=alliedTelesyn, atswitchBridgeMib=atswitchBridgeMib, atswitchBasicVlanTable=atswitchBasicVlanTable, Timeout=Timeout, atswitchVlanConfigGroup=atswitchVlanConfigGroup, atswitchMDA1Type=atswitchMDA1Type, atswitchConfiguredRouter=atswitchConfiguredRouter, atswitchEthernetPortCount=atswitchEthernetPortCount, atswitchBrStpDesignatedRoot=atswitchBrStpDesignatedRoot, 
atswitchBrStpTable=atswitchBrStpTable, atswitchNwMgrTable=atswitchNwMgrTable, atswitchBrTpPortInDiscards=atswitchBrTpPortInDiscards, atswitchEthMonTxGoodFrames=atswitchEthMonTxGoodFrames, atswitchBrStpPortPriority=atswitchBrStpPortPriority, atswitchBrStpPriority=atswitchBrStpPriority, atswitchSwVersion=atswitchSwVersion, atswitchDNServer=atswitchDNServer, atswitchEthPortErrorRxBadFrames=atswitchEthPortErrorRxBadFrames, atswitchBrStpPortPathCost=atswitchBrStpPortPathCost, atswitchMirrorState=atswitchMirrorState, atswitchBrTpPortTable=atswitchBrTpPortTable, atswitchEthPortMonTxTotalBytes=atswitchEthPortMonTxTotalBytes, atswitchBrStpPortEntry=atswitchBrStpPortEntry, atswitchBrStpHelloTime=atswitchBrStpHelloTime, atswitchEthernetStatsGroup=atswitchEthernetStatsGroup, atswitchConfiguredIpAddress=atswitchConfiguredIpAddress, atswitchPortTable=atswitchPortTable, atswitchBrBasePort=atswitchBrBasePort, atswitchTrunkConfig=atswitchTrunkConfig, atswitchMirroringSourcePort=atswitchMirroringSourcePort, atswitchEthPortErrorEntry=atswitchEthPortErrorEntry, atswitchBrTpFdbAddress=atswitchBrTpFdbAddress, atswitchEthErrorCRC=atswitchEthErrorCRC, atswitchBrTpTable=atswitchBrTpTable, atswitchSysGroup=atswitchSysGroup, atswitchDebugAvailableBytes=atswitchDebugAvailableBytes, atswitchEthErrorAlignment=atswitchEthErrorAlignment, atswitchEthMonTxDeferred=atswitchEthMonTxDeferred, atswitchEthPortError=atswitchEthPortError, atswitchStaticMACVlan=atswitchStaticMACVlan, atswitchPortAutosenseOrHalfDuplex=atswitchPortAutosenseOrHalfDuplex, atswitchBrBasePortMtuExceededDiscards=atswitchBrBasePortMtuExceededDiscards, atswitchStaticMACAddress=atswitchStaticMACAddress, atswitchBrStpMaxAge=atswitchBrStpMaxAge, atswitchBrTpAgingTime=atswitchBrTpAgingTime, atswitchDuplicateMacAddress=atswitchDuplicateMacAddress, atswitchBrBase=atswitchBrBase, atswitchEthPortMonEntry=atswitchEthPortMonEntry, atswitchTxGoodFrames=atswitchTxGoodFrames, atswitchBrBasePortEntry=atswitchBrBasePortEntry, 
atswitchBrTpPortLanId=atswitchBrTpPortLanId, atswitchBrStpPortDesignatedBridge=atswitchBrStpPortDesignatedBridge, atswitchPortTransmitPacingConfig=atswitchPortTransmitPacingConfig, atswitchBrTpPortEntry=atswitchBrTpPortEntry, atswitchBrTpFdbLanId=atswitchBrTpFdbLanId, atswitchBrTpPort=atswitchBrTpPort, atswitchPortMACPort=atswitchPortMACPort, atswitchEthPortErrorTable=atswitchEthPortErrorTable, atswitchFwdVlanAge=atswitchFwdVlanAge, atswitchEthMonTxTotalBytes=atswitchEthMonTxTotalBytes, atswitchBrStpPortDesignatedCost=atswitchBrStpPortDesignatedCost, atswitchMDA2Type=atswitchMDA2Type, atswitchIpGroup=atswitchIpGroup, atswitchEthPortMonStats=atswitchEthPortMonStats, atswitchPortConfigGroup=atswitchPortConfigGroup, atswitchRxOverruns=atswitchRxOverruns, atswitchEthPortMonId=atswitchEthPortMonId, atswitchBrStpPortEnable=atswitchBrStpPortEnable, atswitchPortNumber=atswitchPortNumber, atswitchSecurityAction=atswitchSecurityAction, atswitchBeVlanPortMask=atswitchBeVlanPortMask, atswitchPortBridgeid=atswitchPortBridgeid, atswitchIntruderMacAddress=atswitchIntruderMacAddress, atswitchEthMonStats=atswitchEthMonStats, atswitchBrBasePortCircuit=atswitchBrBasePortCircuit, atswitchSwGroup=atswitchSwGroup, atswitchBrBaseLanId=atswitchBrBaseLanId, atswitchPortSwitchingType=atswitchPortSwitchingType, atswitchPortMacAddrGroup=atswitchPortMacAddrGroup, atswitchFwdVlanGroup=atswitchFwdVlanGroup, atswitchEthPortStatsGroup=atswitchEthPortStatsGroup, atswitchFwdVlanVlanId=atswitchFwdVlanVlanId, atswitchBrStpPortState=atswitchBrStpPortState, atswitchFwdVlanTable=atswitchFwdVlanTable, atswitchTxBroadcastFrames=atswitchTxBroadcastFrames, atswitchBrStpPortTable=atswitchBrStpPortTable, atswitchBeVlanIndex=atswitchBeVlanIndex, atswitchPortName=atswitchPortName, atswitchBrStpLanId=atswitchBrStpLanId, atswitchIPAddressStatus=atswitchIPAddressStatus, atswitchEthPortErrorTxTotal=atswitchEthPortErrorTxTotal, atswitchBrTpLanId=atswitchBrTpLanId, atswitchPortSTPConfig=atswitchPortSTPConfig, 
atswitchBrStpPort=atswitchBrStpPort, atswitchBrBasePortIfIndex=atswitchBrBasePortIfIndex, atswitchPortFlowControlEnable=atswitchPortFlowControlEnable, atswitchPortSTPCost=atswitchPortSTPCost, atswitchConfiguredSubnetMask=atswitchConfiguredSubnetMask, atswitchBrTpFdbStatus=atswitchBrTpFdbStatus, atswitchPortLinkState=atswitchPortLinkState, atswitchEthPortMonTable=atswitchEthPortMonTable, atswitchEthMonTxCollisions=atswitchEthMonTxCollisions, atswitchBrStpForwardDelay=atswitchBrStpForwardDelay, atswitchPort2VlanEntry=atswitchPort2VlanEntry, atswitchTxMulticastFrames=atswitchTxMulticastFrames, atswitchNwMgrIpAddr=atswitchNwMgrIpAddr, atswitchSwProduct=atswitchSwProduct, atswitchStaticMACPortNumbers=atswitchStaticMACPortNumbers, atswitchDebugMallocLogAddress=atswitchDebugMallocLogAddress, atswitchBrBaseType=atswitchBrBaseType, newRoot=newRoot, atswitchBrTpEntry=atswitchBrTpEntry, atswitchReset=atswitchReset, atswitchBrTpLearnedEntryDiscards=atswitchBrTpLearnedEntryDiscards, atswitchEthErrorTxTotal=atswitchEthErrorTxTotal, atswitchBrStpEntry=atswitchBrStpEntry, atswitchPvVlanName=atswitchPvVlanName, atswitchPortDuplexStatus=atswitchPortDuplexStatus, atswitchStaticMACTable=atswitchStaticMACTable, atswitchSecurityConfig=atswitchSecurityConfig, atswitchBrStpBridgeMaxAge=atswitchBrStpBridgeMaxAge, atswitchBrStpPortForwardTransitions=atswitchBrStpPortForwardTransitions, atswitchPort2VlanTable=atswitchPort2VlanTable, atswitchConfigGroup=atswitchConfigGroup, atswitchMib=atswitchMib)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
d74e6cfee0eaffc2c322f2c874f880f51dcd580b | 057b67b78201b7b5dbbd9f4030146b42ddb8d02d | /docs/aes.py | 4eea92bfe0a7b4111ec20f594c66dcef9fa72cd2 | [] | no_license | ttme08/fanqiang | 1213444f76a6131885d3b4ce23e7e85f2690fa01 | 5c277f128ffd7733040934eaaa83a3d1c6baeb22 | refs/heads/master | 2022-06-14T17:17:30.399885 | 2020-05-04T12:40:22 | 2020-05-04T12:40:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,924 | py | J4KMEAGjZX8Sei0xEEEiKSoWraXG/WLYrGZiuaC8eHtt5NJiZTWL6Sf9exZyNjup5J9pT8bd61tnL0gO3+sRk/+hdwrfWHRX81m+haccHtDvjrm97RiNPl0OFdD0nWbS+XvyWjJVFb3bWV+uiJgqjwu/RZwqhuwSgJyWhzfXsm5xyPe2SWEqty3JuQwHzbylZGJ+OWpEXxLW+tfE/99gR2HCLFEjO/cOKf9CU0E4oj1+cRUhNYcqcyJlreRjNOKJMKpZYhg+u/05x1esiOuPWfdfwA3ezKwL+kF1cLPcNsEdEYipOOp/G9mW7VIgeRg0umvmxFtlMmsVDtPegEoI4kDGhvItn+EnEpMaazcCCO/2pbSEaLjWtO6YS2n/Xgquq3iJMo/EyFm3+fGsSR9dd8JUR8WPa3TZtjMpGVMy0KmIPPnODdY/Ve1GxNvHbZBdnjyH2qBqN87eldLtKXl87ZUthavGvWgrDTGLczjaa3rwTg+Wet2NoN7CBoLHyaLdCgPM3K+aASyrjjwxVMJ9gQRxUCW/ghcn5ahJssbioswsFkd6TL4wvLJ+y5C3LAOAt/uAn3jseClocKO4SkbfZiNdoNL0McmWLa2sG46a70XRjAeEP5v5ZY4MX4B9ucElhjxXyVsWz1s4DuKsCKH+OB7lODEQiKVDW2zehXEOicmxVLK30sjWhxmbd3pJPw4slbxl06EV+zjxIzGFlmTumEHfmRPCAZL8dXa453dXARpSKLVoi19waEDH9F4KJ49r2yFnt+A3i58TZ7y0EklYwUycKbFK6/nmxcCCJfjMerMtHWvFw9gowEWsHIuhd6kHLhkF9qdN1Kq0g30Pju+OrGjbUMJB40VS+F9OnKl7GkESuGHwwGy6vSDFqyNqsQB2VrHa+Nb5iE2bnhpIKSE0xgZTQbVqBAt1m5Scl0lVhgaNwJjk+OM+xJ9JUCc22xkFTZg4DFtfs5akyd0P6EY7zBu6D/pTziuSq+WWidmyoEh/xyNfxStBbelcWcA3IWXPQE9DENeQI6BCqp1XZw6cz16Lvd4Kgj6+oAmNlUy1JcMrUaPKp2UThDJkRlS2yIlRn+dCN5BWx/+8mdIBg4PEcbU7OncGrxFjSQqesgNJAhr85l+m7BSmVOH9osnualXc53SEmYd2RMceJ3CZuJmHbXOaFy6Wyl0uaDoKOm+PeXGij2Gp0wDkQdTASQkDbYqV7pAC9jF7cWSGUVEUfXcbPg1I0ynhFc6QA3WDgM+2CWyGenlTduMum2ZHKlVorfUfZUBAzXyQ4NC2mjiQ8egbKQ2Rl27YA7I2s2KW99a9OScAPp1Y8Gok308lmq5cRR+OL6ggbI4oNGCqZUHU2VdvELggBNRrY2WOfuZsXi6evsVY5EC/CTuTeaNswBo0ZzQiexo6Z4aFQ/VXavS0g+XpMhOAOJIRiM9sa/8abs4AIGn9CJxyBO0PPiynGFe38vSuI7jtLedi6FySUiVeSPzlGQX17cbr/z0LTvyBT4xBc82kYANP0sBvPsUFYxR/3Gd+jQyj830ybtHrH24j
Qe9n5Ke2TGIgWHKB811y62HUwIzl2h3YvxqqxT+37BxQ4VuOGu9IYPI3vFpQ5CdW9BUtxvnSimybPPPekXV2/sdWOTE7uzJz2s2kRq8bkYSwz7p3ZfLscRL3cQ+BPIFhVgPxR82ddAPlS2KrTHYLjFqzptYkVMdTW2s/yQUTeOJ/GsulZD3aZSghppRjLfPclJpBIgPdxlnB8hXiRbPYrDOF9cjLN6eYTLh0K30zGT1kd0g8D1B6AAQJfa2SUsE+WLDDzIPz/Pbz6I+I2M57B9i07Qm5wTyFBhc75nOApjnxbOyQd+v50/yRiqmrYeS8c284+d6axAy/ggpnT1LTzjm3DrxbZlYrBsSVPTZTMCbqSuQaJKsChxRhV43yCdPPeJN8kbOn9FMkfCZSX2+0NQckcLxP5Cm96KrmMksJ10rHfoWr27vYiwhl84eXseEmrKYjTzbi+FRqii8Xtl3VoLz3Fn+jOG78PZQnJv9tJAnIeLrqitBF8m4kAFHgLTBWq8THVMy1HEtHBZ/DPIUQ9U7YN3U0USQTHvCscd4aIOO9jvLgf3ie65DL02Ob+xKd6lYb1K17ALnGTA8fxmXpK6gqdTOBkuLx9bQstETEBs5pmPRx43Cugu1CYNVySqjNdojyP3fUWjeeItsZWnOe66R54KxZ5b1MtP+JrHX8P/fqnqtro7b0MML7eATde/8Tl5+QPF4vle3NwqAAPoHZibQnfS5mP1vd+l4/en/P6ot1xjM6hqVWeb7TNWHok5tynwgu4SXufmsv+pFkphyyaMzYLiRyKE8QORDOs6etBhiLEA3a2DOqP1LwWRX08+pZ/hiA1GNm1KPD5w+AwoNXYOKDnzlrmvcMVtzO2BQW5VCDn1jPtqWsEyXBNlzXVBjg3E6HaCWwp2DcyBEDzXV5TNh/3JYSVkFZDRh06F3LgzsjP2hymjM3afT+G7jC1J0b/uJvdgLOAMJUKLYX8w3q/wMaggsEZemP6iyCQOwVCKFayHbYy9trHuvoY0NRlJ+VDv2x3TEmHJ8yOqlhw11dsyAt0PA3IkrZ8Y33xoXpPlRhBIUivm13y1/susgZOpboQCgMIg89Ogs/El/be2Ng/OEPbCclGw8jx6jFhJ5/tD6ckeRdjpGp3dHdsGd8uyiZfI9LBvg8GerTC89wJ8mQiOo9ijV0DaAfARxSnS7XtoBXO+rKnUpD28oFZLoXIOOmDjtESfi6ISV1z9itHew1RRYWEhUxloQjHqfjeZHQg61X1GDlcfPzoEuayTK4f5w6V4y4QyQZ6oOVJRsKMEf3V+h1xqsS0hltfIySW0iX6wJRtrtuV7vombQRqe0xvM3hfwyjvOe3Lo3tvvq9yvzuV98PJ/o= | [
"banned.ebook@gmail.com"
] | banned.ebook@gmail.com |
def verifica_quadrado_perfeito(x):
    """Return True if ``x`` is a perfect square, False otherwise.

    Uses the odd-number identity n**2 == 1 + 3 + 5 + ... + (2n - 1):
    repeatedly subtracting successive odd numbers from ``x`` lands exactly
    on 0 if and only if ``x`` is a perfect square (0 counts, as 0 == 0**2).

    Bug fixes over the original: ``n == x-i`` was a no-op comparison instead
    of an assignment, so the loop tested a stale ``n``; as a result x == 1
    returned False and squares such as 4 fell out of the loop returning None.
    """
    if x < 0:
        return False  # negative numbers are never perfect squares
    i = 1
    while x > 0:
        x -= i   # subtract the next odd number: 1, 3, 5, ...
        i += 2
    # x is exactly 0 iff the original value was a sum of consecutive odds,
    # i.e. a perfect square; overshooting leaves x negative.
    return x == 0
"you@example.com"
] | you@example.com |
a7c78601b7a6b035ad14b7ba9cb3399e16ee8108 | 6e1d6d058907e689207ca51525adb0fc11fb5f54 | /Chapter05/Exercise5.03/bookr/reviews/tests.py | 312a8d690b716c2aff67cfdb22e04c6bdf7f9d28 | [
"MIT"
] | permissive | lmoshood/The-Django-Workshop | a083b9f171e7f8388abd51ea82927377721d01a9 | 52e86a8f93cb38bf70d50e9b8d2c6d7dac416f62 | refs/heads/master | 2022-04-20T20:13:59.917020 | 2020-04-19T23:23:19 | 2020-04-19T23:23:19 | 259,112,469 | 0 | 1 | null | 2020-04-26T19:11:57 | 2020-04-26T19:11:56 | null | UTF-8 | Python | false | false | 2,334 | py | import os
from urllib.request import urlopen
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
def read_content(path):
    """Return the entire text content of the file at *path*."""
    handle = open(path)
    try:
        return handle.read()
    finally:
        handle.close()
class Exercise3Test(StaticLiveServerTestCase):
    """
    These tests use `StaticLiveServerTestCase` and `urlopen` since the normal `TestCase` uses a special server that does
    not serve static assets.
    """
    def test_django_conf(self):
        """
        Check that `reviews` is in `settings.INSTALLED_APPS` and that the static dir is set to <projectdir>/static.
        """
        self.assertIn('reviews', settings.INSTALLED_APPS)
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals([settings.BASE_DIR + '/static'], settings.STATICFILES_DIRS)
    def test_main_css_get(self):
        """
        Test that the main.css can be downloaded, and the content matches that on disk. This also checks that main.css
        is in the right location and is being served using the static files finder.
        Since we have the contents of the file we can check it has the right rules too.
        """
        # Fetch through the live server so the static-files machinery is exercised.
        response = urlopen(self.live_server_url + '/static/main.css').read()
        with open(os.path.join(settings.BASE_DIR, 'static', 'main.css'), 'rb') as f:
            self.assertEqual(response, f.read())
        # Spot-check expected CSS selectors (as bytes, since urlopen returns bytes).
        self.assertIn(b'.navbar', response)
        self.assertIn(b'.navbar-brand', response)
        self.assertIn(b'.navbar-brand > img', response)
        self.assertIn(b'body', response)
        self.assertIn(b'h1, h2, h3, h4, h5, h6', response)
    def test_base_html_content(self):
        """
        In the base HTML we should see: {% load static %}, CSS loaded with {% static %} template tag, fonts load CSS
        tag, and no <style>...</style> tags.
        """
        base_template = read_content(os.path.join(settings.BASE_DIR, 'templates', 'base.html'))
        self.assertIn('{% load static %}', base_template)
        self.assertIn('<link rel="stylesheet" href="{% static \'main.css\' %}">', base_template)
        self.assertIn('<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Libre+Baskerville|'
                      'Source+Sans+Pro&display=swap">', base_template)
        # Inline styles should have been replaced by the external stylesheet.
        self.assertNotIn('<style>', base_template)
        self.assertNotIn('</style>', base_template)
| [
"ben@beneboy.co.nz"
] | ben@beneboy.co.nz |
134669add83e4232b2570c51e0fed52d4fb43c12 | 3416464630bc3322dd677001811de1a6884c7dd0 | /dynamic_program/q1143_longestCommonSubsequence/__init__.py | adf40b3d5d875edbfd4202d998f732b09bf6200a | [] | no_license | ttomchy/LeetCodeInAction | f10403189faa9fb21e6a952972d291dc04a01ff8 | 14a56b5eca8d292c823a028b196fe0c780a57e10 | refs/heads/master | 2023-03-29T22:10:04.324056 | 2021-03-25T13:37:01 | 2021-03-25T13:37:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
FileName: __init__.py.py
Description:
Author: Barry Chow
Date: 2020/10/15 10:19 AM
Version: 0.1
"""
from .DP_Solution import Solution
__all__ =[
'Solution'
] | [
"zhouenguo@163.com"
] | zhouenguo@163.com |
f35bb1db7e398b0c815fab2296103f35be66b629 | f6078890ba792d5734d289d7a0b1d429d945a03a | /hw2/submission/zhubenjamin/zhubenjamin_37891_1274820_Problem 1.py | 579a43701a6466f3d97ec4fe8b7c6cc99cbc2f7b | [] | no_license | huazhige/EART119_Lab | 1c3d0b986a0f59727ee4ce11ded1bc7a87f5b7c0 | 47931d6f6a2c7bc053cd15cef662eb2f2027712c | refs/heads/master | 2020-05-04T23:40:53.709217 | 2019-06-11T18:30:45 | 2019-06-11T18:30:45 | 179,552,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,301 | py | # -*- coding: utf-8 -*-
"""
Created on Apr 21, 2019
Class = Astro/Eart 119
Homework 2 - Plots and Animations
Student = Benjamin Zhu (1696575)
"""
#============================================
# (a) imports
#============================================
import numpy as np
# injWell is loaded here but only used in later parts of the assignment.
injWell = np.loadtxt('injWell_OK.txt').T #import the txt file while transposing them
seism = np.loadtxt('seism_OK.txt').T
#============================================
# (b) Convert to decimal years
#============================================
# Each slice keeps a 2-D (1, N) shape; rows are assumed to be laid out as
# id/year/month/day/hour/minute/second after the transpose -- TODO confirm
# against the columns of seism_OK.txt.
Yr = seism[1:2] #assigning the row of data files to their variables
Mo = seism[2:3]
Dy = seism[3:4]
Hr = seism[4:5]
Mn = seism[5:6]
Sc = seism[6:7]
# Decimal year = year + elapsed fraction (months approximated as 1/12 of a
# year; days/hours/minutes/seconds relative to a 365.25-day year).
DecYear = Yr + (Mo-1)/12 + (Dy-1)/365.25 + Hr/(365.25*24) +\
    Mn/(365.25*24*60) + Sc/(365.25*24*3600) #calculations
print(DecYear)
#============================================
# (c) calculate earth quake rate (not solved)
#============================================
"""
def comp_rate( at, k_win):
# smoothed rate from overlapping sample windows normalized by delta_t
aS = np.arange( 0, at.shape[0]-k_win, 1)
aBin, aRate = np.zeros(aS.shape[0]), np.zeros(aS.shape[0])
iS = 0
for s in aS:
i1, i2 = s, s+k_win
aBin[iS] = 0.5*( at[i1]+at[i2])
aRate[iS] = k_win/( at[i2]-at[i1])
iS += 1
return aBin, aRate
#===================================================================================
# dir, file, and parameter
#===================================================================================
# for seism rate
k_win = 200
binsize = 10 # for histogram
# variables
t0 = float( ) # starting time of time axis
at = np.array([]) # time of seismicity
aMag = np.array([]) # magnitudes
aT_inj = np.array([]) # time of injections
aV = np.array([]) # injected volume
#aBin,aRate = np.array([]), np.array([]) # bins and seismicity rates
answer = comp_rate(at, k_win)
print (answer)
"""
#============================================
#
#============================================
#============================================
#
#============================================
| [
"hge2@ucsc.edu"
] | hge2@ucsc.edu |
fd5d67ef0340351cfc841b9b113a3ecc4e7b8449 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/224/users/4363/codes/1721_3035.py | 9964b5fe3880b93dfe8713cd506071cb92e26c0a | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | a=input("escreva um numero")
If=(a>0):
msg=dasd | [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
9df1353b0a03a8a08e934b246193d8cde5896f35 | 9df2fb0bc59ab44f026b0a2f5ef50c72b2fb2ceb | /sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_list.py | 953d0df43bb9313da6edaeaf6657c5ef22dadf96 | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | openapi-env-test/azure-sdk-for-python | b334a2b65eeabcf9b7673879a621abb9be43b0f6 | f61090e96094cfd4f43650be1a53425736bd8985 | refs/heads/main | 2023-08-30T14:22:14.300080 | 2023-06-08T02:53:04 | 2023-06-08T02:53:04 | 222,384,897 | 1 | 0 | MIT | 2023-09-08T08:38:48 | 2019-11-18T07:09:24 | Python | UTF-8 | Python | false | false | 1,666 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagemover import StorageMoverMgmtClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-storagemover
# USAGE
python job_definitions_list.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """List every job definition in the sample project and print each one."""
    # DefaultAzureCredential reads the AZURE_CLIENT_ID / AZURE_TENANT_ID /
    # AZURE_CLIENT_SECRET environment variables described in the module docstring.
    mgmt_client = StorageMoverMgmtClient(
        credential=DefaultAzureCredential(),
        subscription_id="11111111-2222-3333-4444-555555555555",
    )
    job_definitions = mgmt_client.job_definitions.list(
        resource_group_name="examples-rg",
        storage_mover_name="examples-storageMoverName",
        project_name="examples-projectName",
    )
    for job_definition in job_definitions:
        print(job_definition)
# x-ms-original-file: specification/storagemover/resource-manager/Microsoft.StorageMover/stable/2023-03-01/examples/JobDefinitions_List.json
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | openapi-env-test.noreply@github.com |
3788541c03d8e3cdd90d225eba3d8e953c24f588 | f07a42f652f46106dee4749277d41c302e2b7406 | /Test Set/Open Source Projects/tensorlayer/88d239631b9eb49527c21053d79d55e012f11a3c-2-bug.py | a5c542dc0897f55fcb1489019b0b411308f3883d | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,490 | py | #!/usr/bin/env python
__doc__ = """
This demo implements FastText[1] for sentence classification. FastText is a
simple model for text classification with performance often close to
state-of-the-art, and is useful as a solid baseline.
There are some important differences between this implementation and what
is described in the paper. Instead of Hogwild! SGD[2], we use Adam optimizer
with mini-batches. Hierarchical softmax is also not supported; if you have
a large label space, consider utilizing candidate sampling methods provided
by TensorFlow[3].
After 5 epochs, you should get test accuracy close to 90.9%.
[1] Joulin, A., Grave, E., Bojanowski, P., & Mikolov, T. (2016).
Bag of Tricks for Efficient Text Classification.
http://arxiv.org/abs/1607.01759
[2] Recht, B., Re, C., Wright, S., & Niu, F. (2011).
Hogwild: A Lock-Free Approach to Parallelizing Stochastic Gradient Descent.
In Advances in Neural Information Processing Systems 24 (pp. 693–701).
[3] https://www.tensorflow.org/api_guides/python/nn#Candidate_Sampling
"""
import array
import hashlib
import time
import numpy as np
import tensorflow as tf
import tensorlayer as tl
from tensorlayer.layers import *
# Hashed n-grams with 1 < n <= N_GRAM are included as features
# in addition to unigrams.
N_GRAM = 2
# Size of vocabulary; less frequent words will be treated as "unknown"
VOCAB_SIZE = 100000
# Number of buckets used for hashing n-grams
N_BUCKETS = 1000000
# Size of the embedding vectors
EMBEDDING_SIZE = 50
# Number of epochs for which the model is trained
N_EPOCH = 5
# Size of training mini-batches
BATCH_SIZE = 32
# Path to which to save the trained model
MODEL_FILE_PATH = 'model.npz'
class FastTextClassifier(object):
    """Simple wrapper class for creating the graph of FastText classifier."""
    def __init__(self, vocab_size, embedding_size, n_labels):
        # vocab_size is expected to cover both unigram ids and the hashed
        # n-gram buckets, since both index the same embedding table.
        self.vocab_size = vocab_size
        self.embedding_size = embedding_size
        self.n_labels = n_labels
        # Placeholders: a padded batch of token-id sequences and their labels.
        self.inputs = tf.placeholder(tf.int32, shape=[None, None], name='inputs')
        self.labels = tf.placeholder(tf.int32, shape=[None], name='labels')
        # Network structure: average the token embeddings, then one dense layer.
        network = AverageEmbeddingInputlayer(self.inputs, self.vocab_size, self.embedding_size)
        self.network = DenseLayer(network, self.n_labels)
        # Training operation: cross-entropy on the logits, Adam optimizer.
        cost = tl.cost.cross_entropy(self.network.outputs, self.labels, name='cost')
        self.train_op = tf.train.AdamOptimizer().minimize(cost)
        # Predictions: class probabilities and the argmax class id.
        self.prediction_probs = tf.nn.softmax(self.network.outputs)
        self.predictions = tf.argmax(self.network.outputs, axis=1, output_type=tf.int32)
        # self.predictions = tf.cast(tf.argmax( # for TF < 1.2
        #     self.network.outputs, axis=1), tf.int32)
        # Evaluation: mean accuracy over the batch.
        are_predictions_correct = tf.equal(self.predictions, self.labels)
        self.accuracy = tf.reduce_mean(tf.cast(are_predictions_correct, tf.float32))
    def save(self, sess, filename):
        # Persist all network parameters of the current session to an .npz file.
        tl.files.save_npz(self.network.all_params, name=filename, sess=sess)
    def load(self, sess, filename):
        # Restore parameters previously written by save() into the session.
        tl.files.load_and_assign_npz(sess, name=filename, network=self.network)
def augment_with_ngrams(unigrams, unigram_vocab_size, n_buckets, n=2):
    """Augment unigram features with hashed n-gram features.

    The returned list starts with `unigrams` unchanged, followed by one
    bucket id per n-gram (for every size from 2 up to `n`).  Bucket ids
    live in [unigram_vocab_size, unigram_vocab_size + n_buckets).
    """
    def ngrams_of_size(size):
        return list(zip(*[unigrams[offset:] for offset in range(size)]))

    def bucket_for(ngram):
        # Stable hash: serialize the id tuple and SHA-256 it, then fold
        # the digest into one of the n_buckets slots above the vocabulary.
        raw = array.array('L', ngram).tobytes()
        digest = int(hashlib.sha256(raw).hexdigest(), 16)
        return unigram_vocab_size + digest % n_buckets

    augmented = list(unigrams)
    for size in range(2, n + 1):
        for ngram in ngrams_of_size(size):
            augmented.append(bucket_for(ngram))
    return augmented
def load_and_preprocess_imdb_data(n_gram=None):
    """Load the IMDb dataset; optionally extend each review with hashed n-grams."""
    X_train, y_train, X_test, y_test = tl.files.load_imdb_dataset(nb_words=VOCAB_SIZE)
    if n_gram is None:
        return X_train, y_train, X_test, y_test

    def augment_all(reviews):
        # Each review is a sequence of word ids; append its hashed n-gram ids.
        return np.array([augment_with_ngrams(review, VOCAB_SIZE, N_BUCKETS, n=n_gram) for review in reviews])

    return augment_all(X_train), y_train, augment_all(X_test), y_test
def train_test_and_save_model():
    """Train the classifier on IMDb, report test accuracy, and save weights."""
    X_train, y_train, X_test, y_test = load_and_preprocess_imdb_data(N_GRAM)
    # The embedding table must cover unigram ids plus the hashed n-gram buckets.
    classifier = FastTextClassifier(
        vocab_size=VOCAB_SIZE + N_BUCKETS,
        embedding_size=EMBEDDING_SIZE,
        n_labels=2,
    )
    with tf.Session() as sess:
        tl.layers.initialize_global_variables(sess)
        for epoch in range(N_EPOCH):
            start_time = time.time()
            print('Epoch %d/%d' % (epoch + 1, N_EPOCH))
            for X_batch, y_batch in tl.iterate.minibatches(X_train, y_train, batch_size=BATCH_SIZE, shuffle=True):
                # Sequences within a batch are padded to a common length on the fly.
                sess.run(
                    classifier.train_op, feed_dict={
                        classifier.inputs: tl.prepro.pad_sequences(X_batch),
                        classifier.labels: y_batch,
                    })
            print(" took %.5fs" % (time.time() - start_time))
        # Evaluate once on the full test set after all epochs.
        test_accuracy = sess.run(
            classifier.accuracy, feed_dict={
                classifier.inputs: tl.prepro.pad_sequences(X_test),
                classifier.labels: y_test,
            })
        print('Test accuracy: %.5f' % test_accuracy)
        classifier.save(sess, MODEL_FILE_PATH)
if __name__ == '__main__':
train_test_and_save_model()
| [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
b4d9093c07bfb5ebb268fc66790a9f456208aeda | e3b89fc928ed736b1cdf7067d71c0f5f7d9e3586 | /encodeData.py | 9093b97281d18e236b8cd5e868d896c1ee0384cc | [] | no_license | tgadf/pymva | 0b801277b27eb626ee61424e6ef24716087ba582 | 960127c880e61732db77c1049a5fe5ab9918e534 | refs/heads/master | 2020-04-02T19:27:38.956710 | 2018-10-30T00:10:29 | 2018-10-30T00:10:29 | 154,734,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 7 23:17:31 2018
@author: tgadfort
"""
#conda install -c conda-forge category_encoders
#https://github.com/scikit-learn-contrib/categorical-encoding
import category_encoders as ce
# Reference snippets only: each line rebinds `encoder` to a different
# categorical-encoding scheme from the category_encoders package;
# `cols=[...]` is a placeholder (a literal Ellipsis) to be replaced with
# the list of column names to encode.
encoder = ce.BackwardDifferenceEncoder(cols=[...])
encoder = ce.BinaryEncoder(cols=[...])
encoder = ce.HashingEncoder(cols=[...])
encoder = ce.HelmertEncoder(cols=[...])
encoder = ce.OneHotEncoder(cols=[...])
encoder = ce.OrdinalEncoder(cols=[...])
encoder = ce.SumEncoder(cols=[...])
encoder = ce.PolynomialEncoder(cols=[...])
encoder = ce.BaseNEncoder(cols=[...])
encoder = ce.LeaveOneOutEncoder(cols=[...]) | [
"tgadfort@gmail.com"
] | tgadfort@gmail.com |
39f4aec86bf95b756d12cd722cb068c5c35e5824 | 60d737103373825b858e67292865bda8c6f2094f | /active/theses-harvard.py | 52bc1449f4ad0047fdfea0a63b2cea46a68924ce | [] | no_license | fschwenn/ejlmod | fbf4692b857f9f056f9105a7f616a256725f03b6 | ef17512c2e44baa0164fdc6abc997c70ed3d2a74 | refs/heads/master | 2023-01-24T18:56:35.581517 | 2023-01-20T11:18:16 | 2023-01-20T11:18:16 | 91,459,496 | 1 | 1 | null | 2021-10-04T11:58:15 | 2017-05-16T13:06:57 | Python | UTF-8 | Python | false | false | 4,100 | py | # -*- coding: utf-8 -*-
#harvest theses from Harvard
#FS: 2020-01-14
import getopt
import sys
import os
import urllib2
import urlparse
from bs4 import BeautifulSoup
import re
import ejlmod2
import codecs
import datetime
import time
import json
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
# Output / bookkeeping locations on the library AFS share.
xmldir = '/afs/desy.de/user/l/library/inspire/ejl'
retfiles_path = "/afs/desy.de/user/l/library/proc/retinspire/retfiles"#+'_special'
now = datetime.datetime.now()
stampoftoday = '%4d-%02d-%02d' % (now.year, now.month, now.day)
publisher = 'Harvard U. (main)'
# rpp = records per listing page; numofpages = pages to harvest per department.
rpp = 20
numofpages = 1
# (fc, department) pairs; fc is presumably an INSPIRE field code
# ('' for the Physics default) -- TODO confirm.
departments = [('m', 'Mathematics'), ('', 'Physics'), ('a', 'Astronomy'), ('c', 'Computer+Science')]
driver = webdriver.PhantomJS()
driver.implicitly_wait(30)
hdr = {'User-Agent' : 'Magic Browser'}
recs = []
# Pass 1: walk the DASH browse-by-department listings and collect a skeleton
# record (type, link, title, department note) for every thesis found.
for (fc, dep) in departments:
    for i in range(numofpages):
        tocurl = 'https://dash.harvard.edu/handle/1/4927603/browse?type=department&value=%s&rpp=%i&sort_by=2&type=dateissued&offset=%i&etal=-1&order=DESC' % (dep, rpp, i*rpp)
        print '---{ %s }---{ %i/%i }---{ %s }------' % (dep, i+1, numofpages, tocurl)
        req = urllib2.Request(tocurl, headers=hdr)
        tocpage = BeautifulSoup(urllib2.urlopen(req), features="lxml")
        # Throttle requests to be polite to the server.
        time.sleep(10)
        for div in tocpage.body.find_all('div', attrs = {'class' : 'artifact-description'}):
            for a in div.find_all('a'):
                rec = {'tc' : 'T', 'jnl' : 'BOOK', 'oa' : False, 'note' : [ dep ]}
                rec['link'] = 'https://dash.harvard.edu' + a['href']
                rec['tit'] = a.text.strip()
                if fc: rec['fc'] = fc
                recs.append(rec)
jnlfilename = 'THESES-HARVARD-%s' % (stampoftoday)
j = 0
# Pass 2: open every thesis page in the (JavaScript-capable) browser and
# fill the record in from the page's <meta> tags.
for rec in recs:
    j += 1
    print '---{ %i/%i }---{ %s }------' % (j, len(recs), rec['link'])
    try:
        driver.get(rec['link'])
        artpage = BeautifulSoup(driver.page_source, features="lxml")
    # NOTE(review): bare except -- retries exactly once after a minute,
    # whatever the error was.
    except:
        time.sleep(60)
        print 'wait a minute'
        driver.get(rec['link'])
        artpage = BeautifulSoup(driver.page_source, features="lxml")
    time.sleep(5)
    # author; the affiliation is always the publisher itself
    for meta in artpage.find_all('meta', attrs = {'name' : 'citation_author'}):
        rec['autaff'] = [[ meta['content'], publisher ]]
    for meta in artpage.find_all('meta'):
        if meta.has_attr('name'):
            # date
            if meta['name'] == 'DC.date':
                rec['date'] = meta['content']
            # abstract
            elif meta['name'] == 'DCTERMS.abstract':
                #if meta.has_attr('xml:lang') and meta['xml:lang'] in ['en', 'en_US']:
                rec['abs'] = meta['content']
            # fulltext link (FFT)
            elif meta['name'] == 'citation_pdf_url':
                rec['FFT'] = meta['content']
            # URN: a URI-scheme identifier becomes the canonical handle and
            # link; any other identifier is kept as a note
            elif meta['name'] == 'DC.identifier':
                if meta.has_attr('scheme') and re.search('URI', meta['scheme']):
                    rec['urn'] = re.sub('.*harvard.edu\/', '', meta['content'])
                    rec['link'] = meta['content']
                else:
                    rec['note'].append(meta['content'])
            # keywords
            elif meta['name'] == 'citation_keywords':
                rec['keyw'] = re.split('[,;] ', meta['content'])
    # Fall back to a pseudo-DOI derived from the link when no URN was found.
    if not 'urn' in rec.keys():
        rec['doi'] = '20.2000/Harvard' + re.sub('.*\/', '', rec['link'])
    print '  ', rec.keys()
# Write the collected records out as an INSPIRE XML batch file.
xmlf = os.path.join(xmldir,jnlfilename+'.xml')
xmlfile = codecs.EncodedFile(codecs.open(xmlf,mode='wb'),'utf8')
ejlmod2.writenewXML(recs,xmlfile,publisher, jnlfilename)
xmlfile.close()
# Retrieval bookkeeping: register the batch file name exactly once
# (append only when the line is not already present).
retfiles_text = open(retfiles_path,"r").read()
line = jnlfilename+'.xml'+ "\n"
if not line in retfiles_text:
    retfiles = open(retfiles_path,"a")
    retfiles.write(line)
    retfiles.close()
| [
"florian.schwennsen@desy.de"
] | florian.schwennsen@desy.de |
466d4d1bb55b8dafeb39b97860256ff284104ef0 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_jugulars.py | 7cd2dde2bba0293c62b58fbfd9c1b89af3e71010 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py |
from xai.brain.wordbase.adjectives._jugular import _JUGULAR
#calss header
class _JUGULARS(_JUGULAR, ):
def __init__(self,):
_JUGULAR.__init__(self)
self.name = "JUGULARS"
self.specie = 'adjectives'
self.basic = "jugular"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
eb724ce8161a951868219e4b96af89a03703ee0a | 1e4c3ea6fadfd2bcffe900a784009e59c9e33202 | /AxiomPro/DisplayingMixerComponent.py | f015a44ce89102f7e1b660ca435c361723864fcf | [] | no_license | mjimserra/AbletonLive9_RemoteScripts | e762e0c761f0af88fc1b9a8b42ef4dec2df02f72 | d08eb29fbf1ac4d12f73841023375059de5ba29a | refs/heads/master | 2021-05-27T21:34:04.962364 | 2014-08-03T22:49:15 | 2014-08-03T22:49:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,479 | py | #Embedded file name: /Users/versonator/Jenkins/live/Binary/Core_Release_static/midi-remote-scripts/AxiomPro/DisplayingMixerComponent.py
from _Framework.ButtonElement import ButtonElement
from _Framework.MixerComponent import MixerComponent
from _Framework.PhysicalDisplayElement import PhysicalDisplayElement
class DisplayingMixerComponent(MixerComponent):
    """ Special mixer class that displays the Mute/Solo state of the selected track """
    # NOTE(review): this module is decompiler output.  Several bodies below
    # contained decompilation artifacts -- `raise <cond> or AssertionError`
    # (a mangled `assert`), `<cond> and obj.method()` guards, and the
    # syntactically invalid `track.solo and display_string += 'On'`.  Those
    # artifacts have been restored to the conventional _Framework patterns.

    def __init__(self, num_tracks):
        MixerComponent.__init__(self, num_tracks)
        self._selected_tracks = []
        self._display = None
        self._mute_button = None
        self._solo_button = None
        # Poll regularly so a newly selected track can be auto-armed.
        self._register_timer_callback(self._on_timer)

    def disconnect(self):
        self._unregister_timer_callback(self._on_timer)
        self._selected_tracks = None
        MixerComponent.disconnect(self)
        self._display = None

    def set_display(self, display):
        # Was `raise isinstance(...) or AssertionError`: as written that line
        # always raised, so the display could never actually be assigned.
        assert isinstance(display, PhysicalDisplayElement)
        self._display = display

    def set_solo_button(self, button):
        assert button == None or (isinstance(button, ButtonElement) and button.is_momentary())
        self.selected_strip().set_solo_button(button)
        if self._solo_button != button:
            if self._solo_button != None:
                self._solo_button.remove_value_listener(self._solo_value)
            self._solo_button = button
            if self._solo_button != None:
                self._solo_button.add_value_listener(self._solo_value)
            self.update()

    def set_mute_button(self, button):
        assert button == None or (isinstance(button, ButtonElement) and button.is_momentary())
        self.selected_strip().set_mute_button(button)
        if self._mute_button != button:
            if self._mute_button != None:
                self._mute_button.remove_value_listener(self._mute_value)
            self._mute_button = button
            if self._mute_button != None:
                self._mute_button.add_value_listener(self._mute_value)
            self.update()

    def _on_timer(self):
        # Find the most recently selected track that is an armable, not yet
        # armed MIDI track; stale entries are dropped from the end of the list.
        sel_track = None
        while len(self._selected_tracks) > 0:
            track = self._selected_tracks[-1]
            if track != None and track.has_midi_input and track.can_be_armed and not track.arm:
                sel_track = track
                break
            del self._selected_tracks[-1]
        if sel_track != None:
            found_recording_clip = False
            song = self.song()
            tracks = song.tracks
            if song.is_playing:
                # With global record on, any armed track records into the
                # arrangement; otherwise look for a recording session clip.
                check_arrangement = song.record_mode
                for track in tracks:
                    if track.can_be_armed and track.arm:
                        if check_arrangement:
                            found_recording_clip = True
                            break
                        else:
                            playing_slot_index = track.playing_slot_index
                            if playing_slot_index in range(len(track.clip_slots)):
                                slot = track.clip_slots[playing_slot_index]
                                if slot.has_clip and slot.clip.is_recording:
                                    found_recording_clip = True
                                    break
                if found_recording_clip or song.exclusive_arm:
                    # Un-arm every other track before arming the selection.
                    for track in tracks:
                        if track.can_be_armed and track.arm and track != sel_track:
                            track.arm = False
            sel_track.arm = True
            sel_track.view.select_instrument()
            self._selected_tracks = []

    def _solo_value(self, value):
        # NOTE(review): reconstructed from broken decompiler output (the
        # original body was not valid Python).  Intent: while the solo button
        # is held (value != 0) flash the selected track's solo state on the
        # display, and restore the regular display on release.
        assert self._solo_button != None
        assert value in range(128)
        if self._display != None:
            if value != 0 and self.song().view.selected_track not in (self.song().master_track, None):
                track = self.song().view.selected_track
                display_string = str(track.name) + ': Solo '
                if track.solo:
                    display_string += 'On'
                else:
                    display_string += 'Off'
                self._display.display_message(display_string)
            else:
                self._display.update()

    def _mute_value(self, value):
        # NOTE(review): reconstructed like _solo_value, but for the mute state.
        assert self._mute_button != None
        assert value in range(128)
        if self._display != None:
            if value != 0 and self.song().view.selected_track not in (self.song().master_track, None):
                track = self.song().view.selected_track
                display_string = str(track.name) + ': Mute '
                if track.mute:
                    display_string += 'On'
                else:
                    display_string += 'Off'
                self._display.display_message(display_string)
            else:
                self._display.update()

    def _next_track_value(self, value):
        MixerComponent._next_track_value(self, value)
        # Remember the selection so _on_timer can arm it when it is safe.
        self._selected_tracks.append(self.song().view.selected_track)

    def _prev_track_value(self, value):
        MixerComponent._prev_track_value(self, value)
        self._selected_tracks.append(self.song().view.selected_track)
"julien@julienbayle.net"
] | julien@julienbayle.net |
4f706e123529a9d70768fd0c674f57ebc67ba8c0 | ed6625148299e759f39359db9f932dd391b8e86f | /personal_env/lib/python3.8/site-packages/django/template/backends/utils.py | a15e96d77a0902ad37e71d6e5aee17539bde31a4 | [
"MIT"
] | permissive | jestinmwilson/personal-website | 128c4717b21fa6fff9df8295b1137f32bbe44b55 | 6e47a7f33ed3b1ca5c1d42c89c5380d22992ed74 | refs/heads/main | 2023-08-28T11:31:07.916714 | 2021-10-14T09:41:13 | 2021-10-14T09:41:13 | 414,847,553 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | from django.middleware.csrf import get_token
from django.utils.functional import lazy
from django.utils.html import format_html
from django.utils.safestring import SafeString
def csrf_input(request):
    """Render a hidden form input carrying the request's CSRF token."""
    token = get_token(request)
    return format_html(
        '<input type="hidden" name="csrfmiddlewaretoken" value="{}">', token)


# Lazily-evaluated variants, resolved only when the value is first used.
csrf_input_lazy = lazy(csrf_input, SafeString, str)
csrf_token_lazy = lazy(get_token, str)
| [
"noreply@github.com"
] | jestinmwilson.noreply@github.com |
ad589e0a99c3938a5c763d820fe4999d6891dd38 | 9f59670ff100632e5a5e24d10a698e50c115dc35 | /devise/utils/tasks.py | 9bbec95328e00ed6d2a57630e9136befaacdfad4 | [] | no_license | mehdidc/reproduction | 7927990c94f6ffee92c16fd550ecf44060b5544d | 63add75dbdda0575bbc59b895092146cb92848e0 | refs/heads/master | 2020-04-28T02:25:33.388228 | 2017-04-06T21:11:39 | 2017-04-06T21:11:39 | 174,897,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | from invoke import task
import pickle
@task
def word_embedding_to_binary(filename, out_filename):
    """Convert a text word-embedding file to a pickled dict.

    Each input line is expected to hold a word followed by its
    space-separated float components; the output is a pickled
    {word: [floats]} mapping written to *out_filename*.
    """
    words = dict()
    with open(filename) as fd:
        for line in fd:
            components = line.split(" ")
            word = components[0]
            # Materialize the floats: under Python 3 `map` returns a lazy
            # iterator, which would be pickled instead of the numbers.
            embedding = [float(value) for value in components[1:]]
            words[word] = embedding
    # Pickle output is binary data: "wb" is required (a text-mode file makes
    # pickle.dump fail under Python 3) and also works under Python 2.
    with open(out_filename, "wb") as fd:
        pickle.dump(words, fd)
| [
"mehdi@cherti.name"
] | mehdi@cherti.name |
ef72922eb4c2256568f87f0af32022faf169f981 | 020489f1519deb3dd6df459d2b4a853bf64c6278 | /triclelite/scramble/tools/common_tools.py | bfa2db731bbbcd0256f0039d1f06ad2318181bc8 | [] | no_license | reritom/Tricle-Lite | 8d59e58299b19ee355a2153def4d72fb890cb6ab | c01065da770e7723bccb55d7f314f8b4164861d6 | refs/heads/master | 2021-06-03T16:18:41.307052 | 2020-11-13T12:02:17 | 2020-11-13T12:02:17 | 116,177,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | def show_request(request):
'''
This method prints the request data and images. Only to be used in local
'''
print("data: " + str(request.POST))
print(request.FILES)
| [
"reikudjinn@gmail.com"
] | reikudjinn@gmail.com |
d029694c6ea99255d45c0c33dda89431de55aa7f | fe26b42d185c531b38a306fec6d35a6b00f03f88 | /multiprocess-queue.py | 8a8ddaa5d3dc4841792bc41e5e009d025cd59d55 | [] | no_license | szhmery/test-python | 067e15d94c2a214868432cbfc934f0d6c07ec711 | 65627c8dd9b13e6ae803e617ba3df5b7d88f9d27 | refs/heads/master | 2020-03-14T07:24:28.070812 | 2018-07-27T14:36:41 | 2018-07-27T14:36:41 | 131,504,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,004 | py | import time
from multiprocessing import Process, JoinableQueue, Queue
from random import random
tasks_queue = JoinableQueue()
results_queue = Queue()
def double(n):
    """Return twice the given value."""
    doubled = n * 2
    return doubled
def producer(in_queue):
    # Push (callable, argument) work items onto the joinable queue at random
    # intervals; once a long wait (> 0.9s) comes up, enqueue a None sentinel
    # so the consumer knows to shut down, then stop producing.
    while 1:
        wt = random()
        time.sleep(wt)
        in_queue.put((double, wt))
        if wt > 0.9:
            in_queue.put(None)
            print 'stop producer'
            break
def consumer(in_queue, out_queue):
    """Run (func, arg) tasks from in_queue until a None sentinel arrives.

    Results are pushed onto out_queue.  Every item taken from the joinable
    queue is acknowledged with task_done() -- including the sentinel itself,
    since JoinableQueue.join() blocks until task_done() has been called once
    for every put(); without the acknowledgement the parent's
    tasks_queue.join() would hang forever.
    """
    while 1:
        task = in_queue.get()
        if task is None:
            # The sentinel was also put() on the joinable queue, so it
            # counts toward the unfinished-task counter: acknowledge it.
            in_queue.task_done()
            break
        func, arg = task
        result = func(arg)
        in_queue.task_done()
        out_queue.put(result)
processes = []
# One producer process feeding the joinable task queue.
p = Process(target=producer, args=(tasks_queue,))
p.start()
processes.append(p)
# One consumer process turning (func, arg) tasks into results.
p = Process(target=consumer, args=(tasks_queue, results_queue))
p.start()
processes.append(p)
# join() returns only once task_done() has been called for every item that
# was put() on the queue -- note this includes the final None sentinel.
tasks_queue.join()
for p in processes:
    p.join()
# Drain and report whatever results were produced.
while 1:
    if results_queue.empty():
        break
    result = results_queue.get()
    print 'Result:', result
| [
"szhmery@gmail.com"
] | szhmery@gmail.com |
7be6f2344f4f155b1d863065c960619f6ca0a958 | 03cb73ffb69f2caa0f91b62d99d8694d24c6c932 | /arc/__init__.py | 0e94ed53d9bb541061d79220b060b96dd4f1f249 | [
"MIT"
] | permissive | Den4200/arc | 84f1b69beeb2d1aa6be78a740277772d586127a9 | 55d9c43c0db9f3342ef5b5e8fed429e423ad1f3a | refs/heads/master | 2023-05-10T16:04:21.050895 | 2020-05-03T08:22:38 | 2020-05-03T08:22:38 | 260,573,175 | 1 | 0 | MIT | 2021-06-02T01:43:46 | 2020-05-01T22:49:11 | Python | UTF-8 | Python | false | false | 686 | py | from typing import List, Tuple, Union
class _Keycodes:
    """Maps key names to keycodes.

    Indexing with a single name or a tuple of names returns the matching
    keycodes as a list, e.g. ``KEYS['enter', 'esc'] == [13, 27]``.
    """

    def __init__(self) -> None:
        self.keys = {
            'enter': 13,
            'del': 127,
            'backspace': 8,
            'esc': 27,
            'c': 99,
            'x': 120,
            's': 115,
            '=': 61,
            '-': 45
        }

    def __getitem__(self, items: Union[str, Tuple]) -> List[int]:
        # Promote a single key name to a one-element sequence so the lookup
        # below can treat both call styles uniformly.
        requested = [items] if isinstance(items, (int, str)) else items
        return [self.keys[name] for name in requested]


KEYS = _Keycodes()
| [
"dpham.42@hotmail.com"
] | dpham.42@hotmail.com |
d5f1ccfa5c7346676dcd4a84e16d05fec5e5019b | f4547c0e47f9f4d4d6ba4fe3f2908094dc0ac511 | /first.py | fbab4d252666861d003ffc3dc71ae819e03456d5 | [] | no_license | gautamamber/python_mongodb- | c63e00be2eb19029d593462c65268c31a2733a18 | fa595d667c9820263256cabf9a6deae07ec70df8 | refs/heads/master | 2021-05-07T15:24:53.143954 | 2017-11-08T17:06:51 | 2017-11-08T17:06:51 | 110,005,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | from pymongo import MongoClient
# NOTE(review): credentials are hard-coded in the connection URI; they should
# be moved to configuration / environment variables before sharing or deploying.
MONGODB_URI = "mongodb://amber:amber@ds155325.mlab.com:55325/amberamity"
client = MongoClient(MONGODB_URI, connectTimeoutMS=30000)
db = client.get_database("amberamity")
user_records = db.user_records
def getRECORD(user_id):
    # Return the first document whose "user_id" field matches (None if absent).
    records = user_records.find_one({"user_id":user_id})
    return records
def pushRECORD(record):
    # Insert the given document into the user_records collection as-is.
    user_records.insert_one(record)
def updateRecord(record, updates):
    # Apply a partial $set update to the document matched by _id.
    # upsert=False: silently does nothing if that document no longer exists.
    user_records.update_one({'_id': record['_id']},{
        '$set': updates
    }, upsert=False)
# Demo data.  NOTE(review): this insert runs as a module-level side effect,
# i.e. whenever the file is imported, not only when executed as a script.
record = {
    "name": "gautam",
    "age": "20",
    "class" : "twelth"
}
pushRECORD(record)
| [
"ambergautam1@gmail.com"
] | ambergautam1@gmail.com |
3dd4730257d2ce9162b298d0a83a83683ab1fdb6 | 4900fcc64f66590068ba2197714b8ac4d2bc00fc | /posts/migrations/0024_subcat.py | 64427f1bc083f827dd955d7976162f7fda01079a | [] | no_license | Titania1/e-learning-platforme | ccea984afd1bc6407d9fd89369b17b47a3203f9a | 798633e16c8aab4a6b4ea66b1231f90b92d99cff | refs/heads/main | 2023-06-20T07:26:55.149463 | 2021-07-18T10:41:42 | 2021-07-18T10:41:42 | 387,149,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 887 | py | # Generated by Django 3.1.4 on 2021-01-04 11:56
import autoslug.fields
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated schema migration (see file header); avoid hand-editing the
# field definitions -- create a follow-up migration for further changes.
class Migration(migrations.Migration):
    dependencies = [
        ('posts', '0023_auto_20210104_1057'),
    ]
    operations = [
        migrations.CreateModel(
            name='subcat',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='title', unique=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                # 'parent' references posts.category -- presumably the parent
                # category of this subcategory; nullable, cascade on delete.
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.category')),
            ],
        ),
    ]
| [
"shivamrohillaa@gmail.com"
] | shivamrohillaa@gmail.com |
55f99b5bd75bdb54382219c2c32332676bbf0b37 | 4ad7b285be90bf5e1dad8d81f741fe177c56dbf4 | /whelk/tests/test_basic.py | 94414511153074c958485a3408f29dc618dae43b | [
"LicenseRef-scancode-warranty-disclaimer",
"Zlib"
] | permissive | git-spindle-test-1/whelk | 1be65e23ca79203bc714a9b22a236f52d89ed13b | 03406c86a4435b698b9144ce674c8c8a994d8b5e | refs/heads/master | 2021-01-21T12:43:26.481368 | 2015-12-14T15:08:44 | 2015-12-18T18:30:52 | 56,990,062 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,365 | py | from whelk.tests import *
class BasicTest(unittest.TestCase):
"""Tests whether we can find commands"""
def test_notfound(self):
# Non-existing command
self.assertRaises(AttributeError, lambda: shell.i_do_not_exist)
self.assertRaises(KeyError, lambda: shell['/not/found'])
def test_basic(self):
# Basic command test
r = shell.test_return('0', 'output')
self.assertEqual(r.returncode, 0)
self.assertEqual(r.stdout, b('output\n'))
self.assertEqual(r.stderr, b(''))
r = shell.test_return('22', 'stdout', 'stderr')
self.assertEqual(r.returncode, 22)
self.assertEqual(r.stdout, b('stdout\n'))
self.assertEqual(r.stderr, b('stderr\n'))
def test_underscores(self):
# Underscore-replacement
c = shell.test_dashes
self.assertTrue('test-dashes' in c.name)
r = c(0)
self.assertEqual(r.returncode, 0)
self.assertEqual(r.stderr, b(''))
self.assertEqual(r.stdout, b(''))
def test_exceptions(self):
self.assertRaises(CommandFailed, lambda: shell.false(raise_on_error=True))
def test_defaults(self):
s = Shell(stdout = shell.STDOUT)
input = b("Testing 1 2 3")
r = s.cat(input=input)
self.assertEqual(r.returncode, 0)
self.assertEqual(r.stdout, input)
| [
"dennis@kaarsemaker.net"
] | dennis@kaarsemaker.net |
d2c17457e9ba693684064ebe430d7e92251f9529 | c97536dc1d63e5ab99a3c494cdbd7c329e654efd | /api/client/test/test_pipeline_service_api.py | 01a580101c9ebd2fa52b9387194901e9d484f82c | [
"Apache-2.0"
] | permissive | sabary661990615/mlx | 9958180c42e00b32498b572098789046c927aa0c | 7684155c074f1bd1d16ab183598ce6d19414267b | refs/heads/main | 2023-07-14T12:45:35.408653 | 2021-08-20T21:34:17 | 2021-08-20T21:34:17 | 394,724,026 | 0 | 0 | Apache-2.0 | 2021-08-10T17:11:49 | 2021-08-10T17:11:48 | null | UTF-8 | Python | false | false | 2,377 | py | # Copyright 2021 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
MLX API
MLX API Extension for Kubeflow Pipelines # noqa: E501
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.pipeline_service_api import PipelineServiceApi # noqa: E501
from swagger_client.rest import ApiException
class TestPipelineServiceApi(unittest.TestCase):
"""PipelineServiceApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.pipeline_service_api.PipelineServiceApi() # noqa: E501
def tearDown(self):
pass
def test_approve_pipelines_for_publishing(self):
"""Test case for approve_pipelines_for_publishing
"""
pass
def test_create_pipeline(self):
"""Test case for create_pipeline
"""
pass
def test_delete_pipeline(self):
"""Test case for delete_pipeline
"""
pass
def test_download_pipeline_files(self):
"""Test case for download_pipeline_files
Returns the pipeline YAML compressed into a .tgz (.tar.gz) file. # noqa: E501
"""
pass
def test_get_pipeline(self):
"""Test case for get_pipeline
"""
pass
def test_get_template(self):
"""Test case for get_template
"""
pass
def test_list_pipelines(self):
"""Test case for list_pipelines
"""
pass
def test_set_featured_pipelines(self):
"""Test case for set_featured_pipelines
"""
pass
def test_upload_pipeline(self):
"""Test case for upload_pipeline
"""
pass
if __name__ == '__main__':
unittest.main()
| [
"82406273+mlx-bot@users.noreply.github.com"
] | 82406273+mlx-bot@users.noreply.github.com |
0b07d00ae2dbeac0f4c27afa2a0627c8cecf4ce3 | 264ff719d21f2f57451f322e9296b2f55b473eb2 | /tools/nntool/reports/activation_reporter.py | 00b3388c2cf0163e0f28e97977614afe6899ef17 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | knmcguire/gap_sdk | 06c9537c16fa45dea6b7f5c6b162b53953262915 | 7b0a09a353ab6f0550793d40bd46e98051f4a3d7 | refs/heads/master | 2020-12-20T06:51:19.580497 | 2020-01-21T14:52:28 | 2020-01-21T14:52:28 | 235,992,961 | 0 | 0 | Apache-2.0 | 2020-01-24T11:45:59 | 2020-01-24T11:45:58 | null | UTF-8 | Python | false | false | 3,693 | py | # Copyright (C) 2019 GreenWaves Technologies
# All rights reserved.
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
import copy
from collections import OrderedDict
from graph.nngraph import NNGraph
from utils.stats_funcs import STATS_BITS, astats, calculate_qsnrs
from utils.tabular import Tabular, TabularColumn
from utils.node_id import NodeId
from .reporter import Reporter
def gather_stats(activation, force_ideal=False):
stat = astats(activation)
stat['qstats'] = calculate_qsnrs(activation, stat['ibits'], force_ideal)
return stat
def appendn(row, rep, val=""):
for _ in range(rep):
row.append(val)
def do_header(table):
header = [
TabularColumn("name"),
TabularColumn("mean", fmt=">.3f"),
TabularColumn("std dev", fmt=">.3f"),
TabularColumn("min", fmt=">.3f"),
TabularColumn("max", fmt=">.3f"),
TabularColumn("min acc", fmt=">.3f"),
TabularColumn("max acc", fmt=">.3f"),
TabularColumn("int\nbits", fmt=">d"),
]
for bit_size in STATS_BITS:
header.append(TabularColumn("{}-bits\nformat".format(bit_size), fmt="^s"))
header.append(TabularColumn("{}-bits\nQSNR".format(bit_size), fmt=">.0f"))
header.append(TabularColumn("size\n(bytes)", fmt=">d"))
table.add_row(header)
def do_row(table, node_name, stat, threshold, total):
row = [node_name, stat['mean'], stat['std'],
stat['min'], stat['max']]
if 'min_acc' in stat:
row.append(stat['min_acc'])
row.append(stat['max_acc'])
else:
row.append("")
row.append("")
row.append(stat['ibits'])
if 'qstats' not in stat:
appendn(row, len(STATS_BITS) * 2 + 1)
table.add_row(row)
return total
qstats = stat['qstats']
sel = None
for bit_size in STATS_BITS:
if bit_size in qstats:
qstat = qstats[bit_size]
if (sel is None or sel > bit_size) and qstat['qsnr'] > threshold:
sel = bit_size
row.append("Q{}".format(qstat['q']))
row.append(qstat['qsnr'])
else:
appendn(row, 2)
if sel is not None:
size = stat['size']*sel//8
total += size
row.append(size)
else:
row.append("")
table.add_row(row)
return total
def do_rows(stats, table, threshold):
total = 0
for node_name, stat in stats.items():
total = do_row(table, node_name, stat, threshold, total)
return total
def do_total(table, total):
total_row = ["TOTAL"]
appendn(total_row, 7 + len(STATS_BITS) * 2)
total_row.append(total)
table.add_row(total_row)
def dump_stats_table(stats, do_totals=True, threshold=30):
table = Tabular()
do_header(table)
total = do_rows(stats, table, threshold)
if do_totals:
do_total(table, total)
return table
class ActivationReporter(Reporter):
def __init__(self, do_totals=True, threshold=30.0, yield_fusions=False):
self._do_totals = do_totals
self._threshold = threshold
self._yield_fusions = yield_fusions
def report(self, G: NNGraph, stats):
dump_stats = OrderedDict()
for _, node, fusion_idx, fnode in G.nodes_iterator(self._yield_fusions):
stat = stats[NodeId(node, fnode)]
stat = copy.deepcopy(stat)
if fusion_idx:
name = "{}_{}".format(node.name, fusion_idx)
else:
name = node.name
dump_stats[name] = stat
return dump_stats_table(dump_stats, do_totals=self._do_totals, threshold=self._threshold)
| [
"noreply@github.com"
] | knmcguire.noreply@github.com |
6b57348d5f5ad826051c302ab6dd0b359e0c9756 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/gigasecond/442744b90ea54dde9d89774dcc794c00.py | cac1e2d437346165b5cedc399ecfb21e3134ec27 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 167 | py | from datetime import date, timedelta
gigaseconds = timedelta(seconds=10**9)
def add_gigasecond(birthdate):
return birthdate + gigaseconds
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
5102a36c3d40740383d2bf17d4d414eaded9c386 | 870639af1487cf59b548f56c9cd1a45928c1e2c2 | /tests/components/knx/test_services.py | c61dc54258630841851c356535c813aae14a0971 | [
"Apache-2.0"
] | permissive | atmurray/home-assistant | 9f050944d26c084f8f21e8612a7b90c0ae909763 | 133cb2c3b0e782f063c8a30de4ff55a5c14b9b03 | refs/heads/dev | 2023-03-19T04:26:40.743852 | 2021-11-27T05:58:25 | 2021-11-27T05:58:25 | 234,724,430 | 2 | 0 | Apache-2.0 | 2023-02-22T06:18:36 | 2020-01-18T11:27:02 | Python | UTF-8 | Python | false | false | 5,995 | py | """Test KNX services."""
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from .conftest import KNXTestKit
from tests.common import async_capture_events
async def test_send(hass: HomeAssistant, knx: KNXTestKit):
"""Test `knx.send` service."""
test_address = "1/2/3"
await knx.setup_integration({})
# send DPT 1 telegram
await hass.services.async_call(
"knx", "send", {"address": test_address, "payload": True}, blocking=True
)
await knx.assert_write(test_address, True)
# send raw DPT 5 telegram
await hass.services.async_call(
"knx", "send", {"address": test_address, "payload": [99]}, blocking=True
)
await knx.assert_write(test_address, (99,))
# send "percent" DPT 5 telegram
await hass.services.async_call(
"knx",
"send",
{"address": test_address, "payload": 99, "type": "percent"},
blocking=True,
)
await knx.assert_write(test_address, (0xFC,))
# send "temperature" DPT 9 telegram
await hass.services.async_call(
"knx",
"send",
{"address": test_address, "payload": 21.0, "type": "temperature"},
blocking=True,
)
await knx.assert_write(test_address, (0x0C, 0x1A))
# send multiple telegrams
await hass.services.async_call(
"knx",
"send",
{"address": [test_address, "2/2/2", "3/3/3"], "payload": 99, "type": "percent"},
blocking=True,
)
await knx.assert_write(test_address, (0xFC,))
await knx.assert_write("2/2/2", (0xFC,))
await knx.assert_write("3/3/3", (0xFC,))
async def test_read(hass: HomeAssistant, knx: KNXTestKit):
"""Test `knx.read` service."""
await knx.setup_integration({})
# send read telegram
await hass.services.async_call("knx", "read", {"address": "1/1/1"}, blocking=True)
await knx.assert_read("1/1/1")
# send multiple read telegrams
await hass.services.async_call(
"knx",
"read",
{"address": ["1/1/1", "2/2/2", "3/3/3"]},
blocking=True,
)
await knx.assert_read("1/1/1")
await knx.assert_read("2/2/2")
await knx.assert_read("3/3/3")
async def test_event_register(hass: HomeAssistant, knx: KNXTestKit):
"""Test `knx.event_register` service."""
events = async_capture_events(hass, "knx_event")
test_address = "1/2/3"
await knx.setup_integration({})
# no event registered
await knx.receive_write(test_address, True)
await hass.async_block_till_done()
assert len(events) == 0
# register event with `type`
await hass.services.async_call(
"knx",
"event_register",
{"address": test_address, "type": "2byte_unsigned"},
blocking=True,
)
await knx.receive_write(test_address, (0x04, 0xD2))
await hass.async_block_till_done()
assert len(events) == 1
typed_event = events.pop()
assert typed_event.data["data"] == (0x04, 0xD2)
assert typed_event.data["value"] == 1234
# remove event registration - no event added
await hass.services.async_call(
"knx",
"event_register",
{"address": test_address, "remove": True},
blocking=True,
)
await knx.receive_write(test_address, True)
await hass.async_block_till_done()
assert len(events) == 0
# register event without `type`
await hass.services.async_call(
"knx", "event_register", {"address": test_address}, blocking=True
)
await knx.receive_write(test_address, True)
await knx.receive_write(test_address, False)
await hass.async_block_till_done()
assert len(events) == 2
untyped_event_2 = events.pop()
assert untyped_event_2.data["data"] is False
assert untyped_event_2.data["value"] is None
untyped_event_1 = events.pop()
assert untyped_event_1.data["data"] is True
assert untyped_event_1.data["value"] is None
async def test_exposure_register(hass: HomeAssistant, knx: KNXTestKit):
"""Test `knx.exposure_register` service."""
test_address = "1/2/3"
test_entity = "fake.entity"
test_attribute = "fake_attribute"
await knx.setup_integration({})
# no exposure registered
hass.states.async_set(test_entity, STATE_ON, {})
await knx.assert_no_telegram()
# register exposure
await hass.services.async_call(
"knx",
"exposure_register",
{"address": test_address, "entity_id": test_entity, "type": "binary"},
blocking=True,
)
hass.states.async_set(test_entity, STATE_OFF, {})
await knx.assert_write(test_address, False)
# register exposure
await hass.services.async_call(
"knx",
"exposure_register",
{"address": test_address, "remove": True},
blocking=True,
)
hass.states.async_set(test_entity, STATE_ON, {})
await knx.assert_no_telegram()
# register exposure for attribute with default
await hass.services.async_call(
"knx",
"exposure_register",
{
"address": test_address,
"entity_id": test_entity,
"attribute": test_attribute,
"type": "percentU8",
"default": 0,
},
blocking=True,
)
# no attribute on first change wouldn't work because no attribute change since last test
hass.states.async_set(test_entity, STATE_ON, {test_attribute: 30})
await knx.assert_write(test_address, (30,))
hass.states.async_set(test_entity, STATE_OFF, {})
await knx.assert_write(test_address, (0,))
# don't send same value sequentially
hass.states.async_set(test_entity, STATE_ON, {test_attribute: 25})
hass.states.async_set(test_entity, STATE_ON, {test_attribute: 25})
hass.states.async_set(test_entity, STATE_ON, {test_attribute: 25, "unrelated": 2})
hass.states.async_set(test_entity, STATE_OFF, {test_attribute: 25})
await knx.assert_telegram_count(1)
await knx.assert_write(test_address, (25,))
| [
"noreply@github.com"
] | atmurray.noreply@github.com |
0de155d0c6246dd70a43627dbd6d4515146bed41 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_167/ch56_2019_04_02_11_22_12_410576.py | b71102a26ce7e6987c76cd0a9081fe8dbd16784a | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | def calcula_total_da_nota (n):
l=[]
b=[]
i=0
n=0
while i and n < len(n) and len(i):
total= n*i
i+=1
n+=1
| [
"you@example.com"
] | you@example.com |
bcc91652c70b1a92f4873c008d7dabf8801030e9 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02645/s156048297.py | afa43aa220d5ff522e36ba28420cfeec8ee2b4de | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29 | py | a=str(input())
print(a[0:3])
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
679461f083307a15e18fd892f8642f995542c663 | 3f5a1ef51620fd8c35ef38064ca5aa00776ab6f4 | /ds_and_algo_educative/Circular_Linked_List/Delete_node.py | b03ffc0b97506c96ab87f6e8860db2a1d712a1da | [] | no_license | poojagmahajan/python_exercises | 1b290a5c0689f703538caf89bca5bc6c1fdb392a | 65539cf31c5b2ad5768d652ed5fe95054ce5f63f | refs/heads/master | 2022-11-12T03:52:13.533781 | 2020-07-04T20:50:29 | 2020-07-04T20:54:46 | 263,151,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,958 | py |
"""steps -
1. if List is not empty
2. if Deleting the head node
set cur to head
iterate till next of cur is head
(means here cur is last node which points to head)
3. if head is only node in list means point to self
then make head none
4. else (list having more nodes ,not just head)
head delete
and update new head to next node of old head
5. else (deleting node other than head)
take two pointers cur and prev
iterate till head
update pointers prev and cur
if key match
then delete node and update cur """
class Node:
def __init__(self, data):
self.data = data
self.next = None
class CircularLinkedList:
def __init__(self):
self.head = None
def prepend(self, data):
new_node = Node(data)
cur = self.head
new_node.next = self.head
if not self.head:
new_node.next = new_node
else:
while cur.next != self.head:
cur = cur.next
cur.next = new_node
self.head = new_node
def append(self, data):
if not self.head:
self.head = Node(data)
self.head.next = self.head
else:
new_node = Node(data)
cur = self.head
while cur.next != self.head:
cur = cur.next
cur.next = new_node
new_node.next = self.head
def print_list(self):
cur = self.head
while cur:
print(cur.data)
cur = cur.next
if cur == self.head:
break
def remove(self, key):
if self.head : # List is not empty
if self.head.data == key : #node to be Delete is head node
cur = self.head
while cur.next != self.head :
cur = cur.next # at end cur will be last node points to head
if self.head == self.head.next : #only one node in list
self.head = None
else: # #head with other nodes preent in list
cur.next = self.head.next #delete head
self.head = self.head.next # make new head
else: # node to be delete is other than head
cur = self.head
prev = None
while cur.next != self.head: # traverse the list till end
prev = cur
cur = cur.next
if cur.data == key: # if node match
prev.next = cur.next #delete node
cur = cur.next
cllist = CircularLinkedList()
cllist.append("A")
cllist.append("B")
cllist.append("C")
cllist.append("D")
cllist.remove("A")
cllist.remove("C")
cllist.print_list() | [
"mahajanpoojag@gmail.com"
] | mahajanpoojag@gmail.com |
8392d6f1d064da42edf447bcb039414eb395da39 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/network/v20200301/route_filter.py | 31d4a201da6cf4027db382609ba4d081877b78b9 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,170 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['RouteFilterArgs', 'RouteFilter']
@pulumi.input_type
class RouteFilterArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
route_filter_name: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a RouteFilter resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] route_filter_name: The name of the route filter.
:param pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]] rules: Collection of RouteFilterRules contained within a route filter.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if id is not None:
pulumi.set(__self__, "id", id)
if location is not None:
pulumi.set(__self__, "location", location)
if route_filter_name is not None:
pulumi.set(__self__, "route_filter_name", route_filter_name)
if rules is not None:
pulumi.set(__self__, "rules", rules)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="routeFilterName")
def route_filter_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the route filter.
"""
return pulumi.get(self, "route_filter_name")
@route_filter_name.setter
def route_filter_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "route_filter_name", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]]]:
"""
Collection of RouteFilterRules contained within a route filter.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]]]):
pulumi.set(self, "rules", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class RouteFilter(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
route_filter_name: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteFilterRuleArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Route Filter Resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] route_filter_name: The name of the route filter.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteFilterRuleArgs']]]] rules: Collection of RouteFilterRules contained within a route filter.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RouteFilterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Route Filter Resource.
:param str resource_name: The name of the resource.
:param RouteFilterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RouteFilterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
route_filter_name: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteFilterRuleArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RouteFilterArgs.__new__(RouteFilterArgs)
__props__.__dict__["id"] = id
__props__.__dict__["location"] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["route_filter_name"] = route_filter_name
__props__.__dict__["rules"] = rules
__props__.__dict__["tags"] = tags
__props__.__dict__["etag"] = None
__props__.__dict__["ipv6_peerings"] = None
__props__.__dict__["name"] = None
__props__.__dict__["peerings"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:network:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20161201:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20170301:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20170601:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20170801:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20170901:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20171001:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20171101:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20180101:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20180201:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20180401:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20180601:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20180701:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20180801:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20181001:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20181101:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20181201:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20190201:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20190401:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20190601:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20190701:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20190801:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20190901:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20191101:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20191201:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20200401:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20200501:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20200601:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20200701:RouteFilter"), 
pulumi.Alias(type_="azure-native:network/v20200801:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20201101:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20210201:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20210301:RouteFilter"), pulumi.Alias(type_="azure-native:network/v20210501:RouteFilter")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(RouteFilter, __self__).__init__(
'azure-native:network/v20200301:RouteFilter',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'RouteFilter':
"""
Get an existing RouteFilter resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = RouteFilterArgs.__new__(RouteFilterArgs)
__props__.__dict__["etag"] = None
__props__.__dict__["ipv6_peerings"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["peerings"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["rules"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
return RouteFilter(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="ipv6Peerings")
def ipv6_peerings(self) -> pulumi.Output[Sequence['outputs.ExpressRouteCircuitPeeringResponse']]:
"""
A collection of references to express route circuit ipv6 peerings.
"""
return pulumi.get(self, "ipv6_peerings")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def peerings(self) -> pulumi.Output[Sequence['outputs.ExpressRouteCircuitPeeringResponse']]:
"""
A collection of references to express route circuit peerings.
"""
return pulumi.get(self, "peerings")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the route filter resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def rules(self) -> pulumi.Output[Optional[Sequence['outputs.RouteFilterRuleResponse']]]:
"""
Collection of RouteFilterRules contained within a route filter.
"""
return pulumi.get(self, "rules")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| [
"noreply@github.com"
] | bpkgoud.noreply@github.com |
13e571b7ef732cb18760a4b8eac56aae11b7b6f6 | af500242dc59de0855873e87a1f7f3ff69f7c9b0 | /discord_bot.py | 08bca3a0117cb0e2921510f9b396bd5bc3e1a628 | [] | no_license | Sispheor/discord_troll_bot | 0400aefe9ca6477139b498c6850cf2d710810a10 | 6621556cd63c20e21865de6f05760f6fb321674d | refs/heads/master | 2022-02-22T09:05:09.423633 | 2021-10-24T19:00:08 | 2021-10-24T19:30:07 | 156,092,074 | 0 | 0 | null | 2021-04-18T10:05:53 | 2018-11-04T14:44:24 | Python | UTF-8 | Python | false | false | 2,669 | py | # authorize bot
# https://discordapp.com/oauth2/authorize?&client_id=<my_id>&scope=bot&permissions=0
# apt install ffmpeg
import os
import signal
import discord as discord
from database_loader import get_database
from models.discord_user import DiscordUser
from models.game_session import GameSession
from my_discord_client import MyClient
import logging
client = None
def init_logger():
logger = logging.getLogger('discord_bot')
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
def init_database():
print("Init database")
db = get_database("troll_bot")
db.connect()
db.create_tables([DiscordUser, GameSession])
db.close()
print("Init database... done")
def main():
logger = logging.getLogger('discord_bot')
logger.info("Start Discord Troll Bot")
server_id = os.getenv('DISCORD_SERVER_ID', None)
bot_id = os.getenv('DISCORD_BOT_ID', None)
discord_token = os.getenv('DISCORD_TOKEN', None)
mysql_host = os.getenv('MYSQL_HOST', "127.0.0.1")
mysql_user = os.getenv('MYSQL_USER', None)
mysql_password = os.getenv('MYSQL_PASSWORD', None)
mysql_database = os.getenv('MYSQL_DATABASE', None)
if server_id is None:
print("You must provide a 'DISCORD_SERVER_ID'")
exit(1)
if bot_id is None:
print("You must provide a 'DISCORD_BOT_ID'")
exit(1)
if discord_token is None:
print("You must provide a 'DISCORD_TOKEN'")
exit(1)
if mysql_user is None:
print("You must provide a 'MYSQL_USER'")
exit(1)
if mysql_password is None:
print("You must provide a 'MYSQL_PASSWORD'")
exit(1)
if mysql_host is None:
print("You must provide a 'MYSQL_HOST'")
exit(1)
if mysql_database is None:
print("You must provide a 'MYSQL_DATABASE'")
exit(1)
logger.info("DISCORD_SERVER_ID: %s" % server_id)
logger.info("DISCORD_BOT_ID: %s" % bot_id)
intents = discord.Intents.default()
intents.typing = False
intents.members = True
intents.presences = True
client = MyClient(intents=intents)
client.run(discord_token)
def handle_exit():
print("Clean exit")
if client is not None:
client.change_presence(status=discord.Status.offline)
client.logout()
client.close()
print("Disconnected")
if __name__ == "__main__":
signal.signal(signal.SIGINT, handle_exit)
signal.signal(signal.SIGTERM, handle_exit)
init_logger()
init_database()
main()
| [
"nico.marcq@gmail.com"
] | nico.marcq@gmail.com |
f1571ba276d35a04cb87417fbfbd357256c78554 | 4250618abef0d0dcf399f8a2a23e2049c3458ea8 | /website/wiki/editors/__init__.py | 926c100f57e637ce11700d4e06c62066e48e9193 | [
"MIT"
] | permissive | skbly7/serc | 121fd7e88df25213de4d53fce4bd03c2ea448d68 | 4442298ee05c24c3c6bacffdc56a9f6076397cce | refs/heads/master | 2020-12-27T03:18:45.280464 | 2019-05-16T06:10:31 | 2019-05-16T19:13:12 | 53,425,352 | 0 | 2 | MIT | 2019-05-16T19:13:14 | 2016-03-08T16:00:03 | Python | UTF-8 | Python | false | false | 420 | py | from __future__ import absolute_import
from wiki.conf import settings
from django.core.urlresolvers import get_callable
_EditorClass = None
_editor = None
def getEditorClass():
global _EditorClass
if not _EditorClass:
_EditorClass = get_callable(settings.EDITOR)
return _EditorClass
def getEditor():
global _editor
if not _editor:
_editor = getEditorClass()()
return _editor
| [
"skbly7@gmail.com"
] | skbly7@gmail.com |
bffda1a8a18b767fb92c11803909d33101a396ef | 6c8b3ef3b6a3e77ee9a3cc56898217654b043154 | /typings/rdkit/Chem/Suppliers/DbMolSupplier.pyi | 61f763c7a446fccf2fb3da136c8676a0431cb1d8 | [
"MIT"
] | permissive | Andy-Wilkinson/ChemMLToolkit | 8a1eb24ab317c470bc89efa206e38734cb83a7d2 | 83efc7ea66d2def860a3e04ccd70d77fb689fddc | refs/heads/main | 2021-12-26T04:44:05.566942 | 2021-12-13T21:59:57 | 2021-12-13T21:59:57 | 171,165,863 | 2 | 2 | MIT | 2021-12-13T17:18:30 | 2019-02-17T20:00:01 | Python | UTF-8 | Python | false | false | 1,565 | pyi | """
This type stub file was generated by pyright.
"""
from rdkit.Chem.Suppliers.MolSupplier import MolSupplier
"""
Supplies a class for working with molecules from databases
"""
def warning(msg, dest=...): # -> None:
...
class DbMolSupplier(MolSupplier):
"""
new molecules come back with all additional fields from the
database set in a "_fieldsFromDb" data member
"""
def __init__(self, dbResults, molColumnFormats=..., nameCol=..., transformFunc=..., **kwargs) -> None:
"""
DbResults should be a subclass of Dbase.DbResultSet.DbResultBase
"""
...
def GetColumnNames(self): # -> list[Unknown] | tuple[Unknown, ...]:
...
class ForwardDbMolSupplier(DbMolSupplier):
""" DbMol supplier supporting only forward iteration
new molecules come back with all additional fields from the
database set in a "_fieldsFromDb" data member
"""
def __init__(self, dbResults, **kwargs) -> None:
"""
DbResults should be an iterator for Dbase.DbResultSet.DbResultBase
"""
...
def Reset(self): # -> None:
...
def NextMol(self): # -> None:
"""
NOTE: this has side effects
"""
...
class RandomAccessDbMolSupplier(DbMolSupplier):
def __init__(self, dbResults, **kwargs) -> None:
"""
DbResults should be a Dbase.DbResultSet.RandomAccessDbResultSet
"""
...
def __len__(self): # -> int:
...
def __getitem__(self, idx): # -> None:
...
def Reset(self): # -> None:
...
def NextMol(self): # -> None:
...
| [
"Andy-Wilkinson@users.noreply.github.com"
] | Andy-Wilkinson@users.noreply.github.com |
ea2d8384e1eb62039f6b064003a69cf716fde930 | e26dffd4a54c8c897613bb62a6587d224be8f669 | /.venv/bin/pip3.8 | c109039e60222b71da8a7912000dc14609cc5ade | [] | no_license | MohammedGhafri/django_crud | c5b8984a94dbd8bc592e3c9aab33b76971acac9b | 39e6b4b6b1ef6b1b7ef49c4df991037bdf9e0abf | refs/heads/master | 2022-12-24T12:46:18.394854 | 2020-09-23T08:10:17 | 2020-09-23T08:10:17 | 297,665,059 | 0 | 1 | null | 2020-09-23T08:10:19 | 2020-09-22T13:54:54 | Python | UTF-8 | Python | false | false | 247 | 8 | #!/home/ghafri/django_crud/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"eng.m.ghafri@gmail.com"
] | eng.m.ghafri@gmail.com |
33d9495d1ef2da387aa48d675ce46f75f0399f46 | e2b828b2a83ffdc908399a62afbdcfe45d4e5ce1 | /project/urls.py | addf5a1ad0fb3df153e795251248c82c84cef294 | [] | no_license | mamee93/django-ecommerc | 31294bbce7026e8987d1a21aa0988928127fae82 | d0764b50a981b9d77c9c34f01d2bcad7da9ba34e | refs/heads/main | 2023-02-08T20:32:11.877218 | 2021-01-03T16:04:28 | 2021-01-03T16:04:28 | 326,438,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('admin/', admin.site.urls),
path('accounts/', include('allauth.urls')),
]
| [
"mameeal@gmail.com"
] | mameeal@gmail.com |
1ec663443d423b72112a60041ac5443ef0a9f4b3 | 321b4ed83b6874eeb512027eaa0b17b0daf3c289 | /94/94.binary-tree-inorder-traversal.234795664.Accepted.leetcode.py | 26e7f20ae3496c22874206d6011746e3b09545bf | [] | no_license | huangyingw/submissions | 7a610613bdb03f1223cdec5f6ccc4391149ca618 | bfac1238ecef8b03e54842b852f6fec111abedfa | refs/heads/master | 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | class Solution(object):
def inorderTraversal(self, root):
result = []
stack = []
while stack or root:
if root:
stack.append(root)
root = root.left
else:
root = stack.pop()
result.append(root.val)
root = root.right
return result
| [
"huangyingw@gmail.com"
] | huangyingw@gmail.com |
80f5d642f07cd6433d6b58cab64d9c53b8440152 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2941/60621/242434.py | 0e697308ad5b1ceb32b7b72dad3cfcad3fe71ad9 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | a=int(input())
b=input()
d={"A":4,"B":3,"C":2,"D":1,"F":0,"E":0}
c=0
for i in b:
c+=d[i]
print("{:.14f}".format(c/len(b))) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
69632ae9cb5db0756f0d21a8818b31c02b613944 | 6f182e6b07b2574a1266e02f1e872e6530a0eb19 | /graph2text/onmt/bin/preprocess.py | 3d8ffedca4e28360f2a80529780c9147a8ac3a7e | [
"MIT",
"Apache-2.0"
] | permissive | ahoho/kg2text | bc6b6dd6609c40e299881beeeb139d781df48339 | 4eaecc3d45a47049fdef2cdb7d309f6f32203be2 | refs/heads/master | 2022-12-24T09:40:05.301432 | 2020-06-26T06:56:37 | 2020-06-26T06:56:37 | 275,256,661 | 0 | 0 | Apache-2.0 | 2020-06-26T22:11:10 | 2020-06-26T22:11:09 | null | UTF-8 | Python | false | false | 11,471 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Pre-process Data / features files and build vocabulary
"""
import codecs
import glob
import gc
import torch
from collections import Counter, defaultdict
from onmt.utils.logging import init_logger, logger
from onmt.utils.misc import split_corpus
import onmt.inputters as inputters
import onmt.opts as opts
from onmt.utils.parse import ArgumentParser
from onmt.inputters.inputter import _build_fields_vocab,\
_load_vocab
from functools import partial
from multiprocessing import Pool
def check_existing_pt_files(opt, corpus_type, ids, existing_fields):
""" Check if there are existing .pt files to avoid overwriting them """
existing_shards = []
for maybe_id in ids:
if maybe_id:
shard_base = corpus_type + "_" + maybe_id
else:
shard_base = corpus_type
pattern = opt.save_data + '.{}.*.pt'.format(shard_base)
if glob.glob(pattern):
if opt.overwrite:
maybe_overwrite = ("will be overwritten because "
"`-overwrite` option is set.")
else:
maybe_overwrite = ("won't be overwritten, pass the "
"`-overwrite` option if you want to.")
logger.warning("Shards for corpus {} already exist, {}"
.format(shard_base, maybe_overwrite))
existing_shards += [maybe_id]
return existing_shards
def process_one_shard(corpus_params, params):
corpus_type, fields, graph_reader, src_reader, tgt_reader, align_reader, opt,\
existing_fields, src_vocab, tgt_vocab = corpus_params
i, (graph_shard, src_shard, tgt_shard, align_shard, maybe_id, filter_pred) = params
# create one counter per shard
sub_sub_counter = defaultdict(Counter)
assert len(src_shard) == len(tgt_shard) == len(graph_shard)
logger.info("Building shard %d." % i)
src_data = {"reader": src_reader, "data": src_shard, "dir": opt.src_dir}
graph_data = {"reader": graph_reader, "data": graph_shard, "dir": None}
tgt_data = {"reader": tgt_reader, "data": tgt_shard, "dir": None}
align_data = {"reader": align_reader, "data": align_shard, "dir": None}
_readers, _data, _dir = inputters.Dataset.config(
[('graph', graph_data), ('src', src_data), ('tgt', tgt_data), ('align', align_data)])
dataset = inputters.Dataset(
fields, readers=_readers, data=_data, dirs=_dir,
sort_key=inputters.str2sortkey[opt.data_type],
filter_pred=filter_pred
)
if corpus_type == "train" and existing_fields is None:
for ex in dataset.examples:
for name, field in fields.items():
if ((opt.data_type == "audio") and (name == "src")):
continue
try:
f_iter = iter(field)
except TypeError:
f_iter = [(name, field)]
all_data = [getattr(ex, name, None)]
else:
all_data = getattr(ex, name)
for (sub_n, sub_f), fd in zip(
f_iter, all_data):
has_vocab = (sub_n == 'src' and
src_vocab is not None) or \
(sub_n == 'tgt' and
tgt_vocab is not None)
if (hasattr(sub_f, 'sequential')
and sub_f.sequential and not has_vocab):
val = fd
sub_sub_counter[sub_n].update(val)
if maybe_id:
shard_base = corpus_type + "_" + maybe_id
else:
shard_base = corpus_type
data_path = "{:s}.{:s}.{:d}.pt".\
format(opt.save_data, shard_base, i)
logger.info(" * saving %sth %s data shard to %s."
% (i, shard_base, data_path))
dataset.save(data_path)
del dataset.examples
gc.collect()
del dataset
gc.collect()
return sub_sub_counter
def maybe_load_vocab(corpus_type, counters, opt):
src_vocab = None
tgt_vocab = None
existing_fields = None
if corpus_type == "train":
if opt.src_vocab != "":
try:
logger.info("Using existing vocabulary...")
existing_fields = torch.load(opt.src_vocab)
except torch.serialization.pickle.UnpicklingError:
logger.info("Building vocab from text file...")
src_vocab, src_vocab_size = _load_vocab(
opt.src_vocab, "src", counters,
opt.src_words_min_frequency)
if opt.tgt_vocab != "":
tgt_vocab, tgt_vocab_size = _load_vocab(
opt.tgt_vocab, "tgt", counters,
opt.tgt_words_min_frequency)
return src_vocab, tgt_vocab, existing_fields
def build_save_dataset(corpus_type, fields, src_reader, graph_reader, tgt_reader,
align_reader, opt):
assert corpus_type in ['train', 'valid']
if corpus_type == 'train':
counters = defaultdict(Counter)
srcs = opt.train_src
graphs = opt.train_graph
tgts = opt.train_tgt
ids = opt.train_ids
aligns = opt.train_align
elif corpus_type == 'valid':
counters = None
srcs = [opt.valid_src]
graphs = [opt.valid_graph]
tgts = [opt.valid_tgt]
ids = [None]
aligns = [opt.valid_align]
src_vocab, tgt_vocab, existing_fields = maybe_load_vocab(
corpus_type, counters, opt)
existing_shards = check_existing_pt_files(
opt, corpus_type, ids, existing_fields)
# every corpus has shards, no new one
if existing_shards == ids and not opt.overwrite:
return
def shard_iterator(graphs, srcs, tgts, ids, aligns, existing_shards,
existing_fields, corpus_type, opt):
"""
Builds a single iterator yielding every shard of every corpus.
"""
for graph, src, tgt, maybe_id, maybe_align in zip(graphs, srcs, tgts, ids, aligns):
if maybe_id in existing_shards:
if opt.overwrite:
logger.warning("Overwrite shards for corpus {}"
.format(maybe_id))
else:
if corpus_type == "train":
assert existing_fields is not None,\
("A 'vocab.pt' file should be passed to "
"`-src_vocab` when adding a corpus to "
"a set of already existing shards.")
logger.warning("Ignore corpus {} because "
"shards already exist"
.format(maybe_id))
continue
if ((corpus_type == "train" or opt.filter_valid)
and tgt is not None):
filter_pred = partial(
inputters.filter_example,
use_src_len=opt.data_type == "text",
max_src_len=opt.src_seq_length,
max_tgt_len=opt.tgt_seq_length)
else:
filter_pred = None
src_shards = split_corpus(src, opt.shard_size)
graph_shards = split_corpus(graph, opt.shard_size)
tgt_shards = split_corpus(tgt, opt.shard_size)
align_shards = split_corpus(maybe_align, opt.shard_size)
for i, (gs, ss, ts, a_s) in enumerate(
zip(graph_shards, src_shards, tgt_shards, align_shards)):
yield (i, (gs, ss, ts, a_s, maybe_id, filter_pred))
shard_iter = shard_iterator(graphs, srcs, tgts, ids, aligns, existing_shards,
existing_fields, corpus_type, opt)
with Pool(opt.num_threads) as p:
dataset_params = (corpus_type, fields, graph_reader, src_reader, tgt_reader,
align_reader, opt, existing_fields,
src_vocab, tgt_vocab)
func = partial(process_one_shard, dataset_params)
for sub_counter in p.imap(func, shard_iter):
if sub_counter is not None:
for key, value in sub_counter.items():
counters[key].update(value)
if corpus_type == "train":
vocab_path = opt.save_data + '.vocab.pt'
if existing_fields is None:
fields = _build_fields_vocab(
fields, counters, opt.data_type,
opt.share_vocab, opt.vocab_size_multiple,
opt.src_vocab_size, opt.src_words_min_frequency,
opt.tgt_vocab_size, opt.tgt_words_min_frequency)
else:
fields = existing_fields
torch.save(fields, vocab_path)
def build_save_vocab(train_dataset, fields, opt):
fields = inputters.build_vocab(
train_dataset, fields, opt.data_type, opt.share_vocab,
opt.src_vocab, opt.src_vocab_size, opt.src_words_min_frequency,
opt.tgt_vocab, opt.tgt_vocab_size, opt.tgt_words_min_frequency,
vocab_size_multiple=opt.vocab_size_multiple
)
vocab_path = opt.save_data + '.vocab.pt'
torch.save(fields, vocab_path)
def count_features(path):
"""
path: location of a corpus file with whitespace-delimited tokens and
│-delimited features within the token
returns: the number of features in the dataset
"""
with codecs.open(path, "r", "utf-8") as f:
first_tok = f.readline().split(None, 1)[0]
return len(first_tok.split(u"│")) - 1
def preprocess(opt):
ArgumentParser.validate_preprocess_args(opt)
torch.manual_seed(opt.seed)
init_logger(opt.log_file)
logger.info("Extracting features...")
src_nfeats = 0
tgt_nfeats = 0
for src, tgt in zip(opt.train_src, opt.train_tgt):
src_nfeats += count_features(src) if opt.data_type == 'text' \
else 0
tgt_nfeats += count_features(tgt) # tgt always text so far
logger.info(" * number of source features: %d." % src_nfeats)
logger.info(" * number of target features: %d." % tgt_nfeats)
logger.info("Building `Fields` object...")
fields = inputters.get_fields(
opt.data_type,
src_nfeats,
tgt_nfeats,
dynamic_dict=opt.dynamic_dict,
with_align=opt.train_align[0] is not None,
src_truncate=opt.src_seq_length_trunc,
tgt_truncate=opt.tgt_seq_length_trunc)
src_reader = inputters.str2reader[opt.data_type].from_opt(opt)
graph_reader = inputters.str2reader["text"].from_opt(opt)
tgt_reader = inputters.str2reader["text"].from_opt(opt)
align_reader = inputters.str2reader["text"].from_opt(opt)
logger.info("Building & saving training data...")
build_save_dataset(
'train', fields, src_reader, graph_reader, tgt_reader, align_reader, opt)
if opt.valid_src and opt.valid_tgt:
logger.info("Building & saving validation data...")
build_save_dataset(
'valid', fields, src_reader, graph_reader, tgt_reader, align_reader, opt)
def _get_parser():
parser = ArgumentParser(description='preprocess.py')
opts.config_opts(parser)
opts.preprocess_opts(parser)
return parser
def main():
parser = _get_parser()
opt = parser.parse_args()
preprocess(opt)
if __name__ == "__main__":
main()
| [
"ribeiro@aiphes.tu-darmstadt.de"
] | ribeiro@aiphes.tu-darmstadt.de |
8fe8d8bbb0912b1b6723ae96719c9d83cd3a7cb7 | 60fa442ae76b960ab21b10fb527c0eac85cdc587 | /python/GetBfacsfromPdb2Pdb.py | 25a0cab9d1c0a5eb66ba89cf74ff05ede32f6a54 | [] | no_license | pjanowski/Pawel_PhD_Scripts | 8e6c2b92b492f9cacf425327a01faaceb27bb87d | 5f9b1735ca6da8fdf0946d6748f3da7d3d723d5e | refs/heads/master | 2021-01-10T06:15:30.287053 | 2015-11-16T04:04:07 | 2015-11-16T04:04:07 | 46,250,317 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,178 | py | #! /usr/bin/python
import sys
import os
from numpy import *
#####input the following variables
crystalfile=sys.argv[1]
targetfile=sys.argv[2]
#############################
#This is to get the bfactors from the original cif or pdb file and put them
#the bfactor column of another file. If atoms are in the same order in the
#two files, you can just use cut, but if not use this script. It identifies
#atoms by atomname and residue number.
f=open(crystalfile,'r')
p= [l for l in f.readlines() if l.strip()]
f.close()
#read in amber pdb (eliminate H and EW atoms,
f=open(targetfile,'r')
a = [l for l in f.readlines() if l.strip()]
f.close()
f=open('newfile.pdb','w')
for line in a:
if line[0:6] != 'ATOM ':
f.write(line)
else:
check=0
resnum=int(line[22:26])
atomname=line[12:16].strip()
for line2 in p:
if line2[0:6] != 'ATOM ':
continue
if (int(line2[22:26]))==resnum and line2[12:16].strip()==atomname:
bfactor=float(line2[60:66])
f.write(line[0:60]+'%6.2f' %bfactor +line[66:])
check=+1
if check>1:
print "oh boy something is wrong"
print line
if check==0:
f.write(line)
print line
f.close()
| [
"pawelrc@gmail.com"
] | pawelrc@gmail.com |
e466432360117169eeda09a88c691d3f1ac4ecda | 2c74bb301f1ed83b79254944183ac5a18a639fdf | /tests/components/file/test_sensor.py | 725ccb527f8127426461473a8f07eae6fda3b9de | [
"Apache-2.0"
] | permissive | Adminiuga/home-assistant | 5bec93007ddac1a268cc359bf7e48530c5f73b38 | dcf68d768e4f628d038f1fdd6e40bad713fbc222 | refs/heads/dev | 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 | Apache-2.0 | 2023-02-22T06:14:31 | 2018-03-05T14:11:09 | Python | UTF-8 | Python | false | false | 3,066 | py | """The tests for local file sensor platform."""
from unittest.mock import Mock, patch
import pytest
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import get_fixture_path, mock_registry
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_value(hass: HomeAssistant) -> None:
"""Test the File sensor."""
config = {
"sensor": {
"platform": "file",
"name": "file1",
"file_path": get_fixture_path("file_value.txt", "file"),
}
}
with patch.object(hass.config, "is_allowed_path", return_value=True):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.file1")
assert state.state == "21"
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_value_template(hass: HomeAssistant) -> None:
"""Test the File sensor with JSON entries."""
config = {
"sensor": {
"platform": "file",
"name": "file2",
"file_path": get_fixture_path("file_value_template.txt", "file"),
"value_template": "{{ value_json.temperature }}",
}
}
with patch.object(hass.config, "is_allowed_path", return_value=True):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.file2")
assert state.state == "26"
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_empty(hass: HomeAssistant) -> None:
"""Test the File sensor with an empty file."""
config = {
"sensor": {
"platform": "file",
"name": "file3",
"file_path": get_fixture_path("file_empty.txt", "file"),
}
}
with patch.object(hass.config, "is_allowed_path", return_value=True):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.file3")
assert state.state == STATE_UNKNOWN
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_path_invalid(hass: HomeAssistant) -> None:
"""Test the File sensor with invalid path."""
config = {
"sensor": {
"platform": "file",
"name": "file4",
"file_path": get_fixture_path("file_value.txt", "file"),
}
}
with patch.object(hass.config, "is_allowed_path", return_value=False):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("sensor")) == 0
| [
"noreply@github.com"
] | Adminiuga.noreply@github.com |
cff39237331fe9e1d9a2bd00d5eb9295e1a2f178 | 3c936cecac73c0de0ce8bca959ef9b49abf96b5e | /slowfast/utils/benchmark.py | 94b94b4f8038517f82d504da2378cc5937ed7c48 | [
"Apache-2.0"
] | permissive | AlexanderMelde/SlowFast | 8cce07d399d3b0d2fe08bf471b5f69e147e9c9e3 | b26b3ec3f3b4cd34c3d626b0fa06818bc69327f4 | refs/heads/master | 2022-11-19T00:50:49.484136 | 2020-04-29T23:28:08 | 2020-04-29T23:30:51 | 257,588,764 | 0 | 0 | Apache-2.0 | 2020-04-30T20:51:40 | 2020-04-21T12:29:11 | Python | UTF-8 | Python | false | false | 3,198 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Functions for benchmarks.
"""
import numpy as np
import pprint
import torch
import tqdm
from fvcore.common.timer import Timer
import slowfast.utils.logging as logging
import slowfast.utils.misc as misc
from slowfast.datasets import loader
from slowfast.utils.env import setup_environment
logger = logging.get_logger(__name__)
def benchmark_data_loading(cfg):
"""
Benchmark the speed of data loading in PySlowFast.
Args:
cfg (CfgNode): configs. Details can be found in
slowfast/config/defaults.py
"""
# Set up environment.
setup_environment()
# Set random seed from configs.
np.random.seed(cfg.RNG_SEED)
torch.manual_seed(cfg.RNG_SEED)
# Setup logging format.
logging.setup_logging(cfg.OUTPUT_DIR)
# Print config.
logger.info("Benchmark data loading with config:")
logger.info(pprint.pformat(cfg))
timer = Timer()
dataloader = loader.construct_loader(cfg, "train")
logger.info(
"Initialize loader using {:.2f} seconds.".format(timer.seconds())
)
# Total batch size across different machines.
batch_size = cfg.TRAIN.BATCH_SIZE * cfg.NUM_SHARDS
log_period = cfg.BENCHMARK.LOG_PERIOD
epoch_times = []
# Test for a few epochs.
for cur_epoch in range(cfg.BENCHMARK.NUM_EPOCHS):
timer = Timer()
timer_epoch = Timer()
iter_times = []
for cur_iter, _ in enumerate(tqdm.tqdm(dataloader)):
if cur_iter > 0 and cur_iter % log_period == 0:
iter_times.append(timer.seconds())
ram_usage, ram_total = misc.cpu_mem_usage()
logger.info(
"Epoch {}: {} iters ({} videos) in {:.2f} seconds. "
"RAM Usage: {:.2f}/{:.2f} GB.".format(
cur_epoch,
log_period,
log_period * batch_size,
iter_times[-1],
ram_usage,
ram_total,
)
)
timer.reset()
epoch_times.append(timer_epoch.seconds())
ram_usage, ram_total = misc.cpu_mem_usage()
logger.info(
"Epoch {}: in total {} iters ({} videos) in {:.2f} seconds. "
"RAM Usage: {:.2f}/{:.2f} GB.".format(
cur_epoch,
len(dataloader),
len(dataloader) * batch_size,
epoch_times[-1],
ram_usage,
ram_total,
)
)
logger.info(
"Epoch {}: on average every {} iters ({} videos) take {:.2f}/{:.2f} "
"(avg/std) seconds.".format(
cur_epoch,
log_period,
log_period * batch_size,
np.mean(iter_times),
np.std(iter_times),
)
)
logger.info(
"On average every epoch ({} videos) takes {:.2f}/{:.2f} "
"(avg/std) seconds.".format(
len(dataloader) * batch_size,
np.mean(epoch_times),
np.std(epoch_times),
)
)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.