blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e7fce07a2a9175afc2ab1e85bb8ee860ae8f77dd | d755aaaf905b48baf31aa90332f03c45f4c8dad3 | /tests/test_utils.py | 494f1115abbc6feaa46c734931099119ef47fe06 | [
"Apache-2.0"
] | permissive | django-ftl/fluent-compiler | 36ffe0c76678e82f4f15bbccef057c8e4cd0e6bc | d8f19b47161788fbdea9822b130ef136fb839540 | refs/heads/master | 2023-08-08T04:16:52.368218 | 2023-07-21T10:37:17 | 2023-07-21T10:37:17 | 248,319,322 | 20 | 1 | NOASSERTION | 2023-04-18T16:14:05 | 2020-03-18T19:04:42 | Python | UTF-8 | Python | false | false | 1,560 | py | import unittest
from fluent_compiler.errors import FluentFormatError
from fluent_compiler.utils import Any, inspect_function_args
class TestInspectFunctionArgs(unittest.TestCase):
    """Tests for fluent_compiler.utils.inspect_function_args."""

    def test_inspect_function_args_positional(self):
        # Positional-only callables report their exact arity and no keywords.
        cases = [
            (lambda: None, 0),
            (lambda x: None, 1),
            (lambda x, y: None, 2),
        ]
        for func, arity in cases:
            self.assertEqual(inspect_function_args(func, "name", []), (arity, []))

    def test_inspect_function_args_var_positional(self):
        # *args means "any number of positional arguments".
        result = inspect_function_args(lambda *args: None, "name", [])
        self.assertEqual(result, (Any, []))

    def test_inspect_function_args_keywords(self):
        # Defaulted parameters are reported as the allowed keyword names.
        result = inspect_function_args(lambda x, y=1, z=2: None, "name", [])
        self.assertEqual(result, (1, ["y", "z"]))

    def test_inspect_function_args_var_keywords(self):
        # **kwargs means "any keyword argument name".
        result = inspect_function_args(lambda x, **kwargs: None, "name", [])
        self.assertEqual(result, (1, Any))

    def test_inspect_function_args_var_positional_plus_keywords(self):
        result = inspect_function_args(lambda x, y=1, *args: None, "name", [])
        self.assertEqual(result, (Any, ["y"]))

    def test_inspect_function_args_bad_keyword_args(self):
        # Keyword names that are not valid identifiers are dropped and reported
        # as errors; hyphenated names are accepted.
        def foo():
            pass

        foo.ftl_arg_spec = (0, ["bad kwarg", "good", "this-is-fine-too"])
        errors = []
        self.assertEqual(
            inspect_function_args(foo, "FOO", errors),
            (0, ["good", "this-is-fine-too"]),
        )
        self.assertEqual(
            errors,
            [FluentFormatError("FOO() has invalid keyword argument name 'bad kwarg'")],
        )
| [
"L.Plant.98@cantab.net"
] | L.Plant.98@cantab.net |
1bcabdb6dea75afd506399158d7c92cea07c82c0 | 0cbf36f06f5316326ef635f14c887cd2849800db | /typings/celery/utils/graph.pyi | 331cef71c17845a88735c126210f7a54a83b7b86 | [
"Apache-2.0"
] | permissive | espritgames/celery_types | b59545a7cd28f06e766a1a520590f3bbc155e82f | 4d4064eb78d2a1a3e79a5fefe111f59ad4d3c9b9 | refs/heads/main | 2023-08-18T20:11:33.992509 | 2021-10-04T11:21:49 | 2021-10-04T11:21:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | pyi | class DependencyGraph: ...
# Typing stub (.pyi): body intentionally elided with "...".
class GraphFormatter: ...
| [
"steve@dignam.xyz"
] | steve@dignam.xyz |
b9881ef2ec773ce08737733c455d00c4e0f5a07e | bcf0e03ebd7e55588dcf48ab5d990534f8d9ab0c | /CodeChef/Archive 2019/dijikstra algo.py | 8a5245f7d11e94a49ff2d6015c4aea7afa726d55 | [] | no_license | nsky80/competitive_programming | 731321aaf42d9ae546f1d13bbb05215a1fbcfe45 | 9b0c0ffccf092d4d4bbf50cac1746f44dd977d57 | refs/heads/master | 2022-02-06T11:58:44.313635 | 2022-01-30T09:20:15 | 2022-01-30T09:20:15 | 199,516,791 | 1 | 2 | null | 2022-01-30T09:20:16 | 2019-07-29T19:43:17 | Python | UTF-8 | Python | false | false | 2,697 | py | # Python program for Dijkstra's single
# source shortest path algorithm. The program is
# for adjacency matrix representation of the graph
# Library for INT_MAX
import sys
class Graph():
    """Adjacency-matrix graph supporting Dijkstra's single-source shortest paths."""

    def __init__(self, vertices):
        # Number of vertices and a V x V weight matrix (0 == no edge).
        self.V = vertices
        self.graph = [[0 for column in range(vertices)]
                      for row in range(vertices)]

    def printSolution(self, dist):
        """Print each vertex with its distance from the source."""
        # BUG FIX: the original printed a literal "t" where a tab ("\t")
        # was intended.
        print("Vertex \tDistance from Source")
        for node in range(self.V):
            print(node, "\t", dist[node])

    def minDistance(self, dist, sptSet):
        """Return the unvisited vertex with the smallest tentative distance.

        Returns -1 when every vertex is already settled (the original
        raised UnboundLocalError in that case).
        """
        # BUG FIX: sys.maxint was removed in Python 3; use sys.maxsize.
        # Also avoid shadowing the builtin min().
        minimum = sys.maxsize
        min_index = -1
        for v in range(self.V):
            if dist[v] < minimum and not sptSet[v]:
                minimum = dist[v]
                min_index = v
        return min_index

    def dijkstra(self, src):
        """Run Dijkstra from ``src``; print and return the distance list.

        Returning ``dist`` is backward compatible (the original returned
        None, which no caller used) and makes the routine testable.
        """
        dist = [sys.maxsize] * self.V
        dist[src] = 0
        sptSet = [False] * self.V  # vertices whose shortest distance is final
        for _ in range(self.V):
            # Pick the nearest vertex not yet in the shortest-path tree...
            u = self.minDistance(dist, sptSet)
            sptSet[u] = True
            # ...and relax every edge leaving it, but only when the vertex
            # is unsettled and the new path is strictly shorter.
            for v in range(self.V):
                if (self.graph[u][v] > 0 and not sptSet[v]
                        and dist[v] > dist[u] + self.graph[u][v]):
                    dist[v] = dist[u] + self.graph[u][v]
        self.printSolution(dist)
        return dist
# Driver program: the classic 9-vertex example graph, solved from vertex 0.
g = Graph(9)
g.graph = [
    [0, 4, 0, 0, 0, 0, 0, 8, 0],
    [4, 0, 8, 0, 0, 0, 0, 11, 0],
    [0, 8, 0, 7, 0, 4, 0, 0, 2],
    [0, 0, 7, 0, 9, 14, 0, 0, 0],
    [0, 0, 0, 9, 0, 10, 0, 0, 0],
    [0, 0, 4, 14, 10, 0, 2, 0, 0],
    [0, 0, 0, 0, 0, 2, 0, 1, 6],
    [8, 11, 0, 0, 0, 0, 1, 0, 7],
    [0, 0, 2, 0, 0, 0, 6, 7, 0],
]
g.dijkstra(0)
# This code is contributed by Divyanshu Mehta
| [
"satishkumary80@gmail.com"
] | satishkumary80@gmail.com |
123d610b04b8cb8328ed1b6faa51e01b3cf9a9f5 | f62fd455e593a7ad203a5c268e23129473d968b6 | /python-cloudkittyclient-1.0.0/cloudkittyclient/common/base.py | 6212817e7747c35c8debdac13765dfa3e5a4de3b | [
"Apache-2.0"
] | permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 5,032 | py | # Copyright 2012 OpenStack Foundation
# Copyright 2015 Objectif Libre
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import copy
from six.moves.urllib import parse
from cloudkittyclient.apiclient import base
from cloudkittyclient import exc
from cloudkittyclient.i18n import _
def getid(obj):
    """Extracts object ID.

    Abstracts the common pattern of allowing both an object or an
    object's ID (UUID) as a parameter when dealing with relationships.
    """
    # getattr with a default mirrors the original try/except AttributeError:
    # objects without an ``id`` attribute are returned unchanged.
    return getattr(obj, 'id', obj)
class Manager(object):
    """Managers interact with a particular type of API.

    It works with samples, meters, alarms, etc. and provide CRUD operations for
    them.
    """

    # Subclasses set this to the Resource subclass they materialize.
    resource_class = None

    def __init__(self, api):
        # ``api`` is the HTTP client used for every request below.
        self.api = api

    @property
    def client(self):
        """Compatible with latest oslo-incubator.apiclient code."""
        return self.api

    def _create(self, url, body):
        # POST ``body`` as JSON; wrap a non-empty response in resource_class.
        # Returns None when the server sends back an empty/falsy payload.
        body = self.api.post(url, json=body).json()
        if body:
            return self.resource_class(self, body)

    def _list(self, url, response_key=None, obj_class=None, body=None,
              expect_single=False):
        # GET ``url`` and build resource objects from the JSON payload.
        #   response_key:  optional key under which the item list lives;
        #                  a missing key yields an empty list.
        #   expect_single: treat the payload as one item, not a list.
        resp = self.api.get(url)
        if not resp.content:
            # An empty body is treated as "not found" rather than "no items".
            raise exc.HTTPNotFound
        body = resp.json()
        if obj_class is None:
            obj_class = self.resource_class
        if response_key:
            try:
                data = body[response_key]
            except KeyError:
                return []
        else:
            data = body
        if expect_single:
            data = [data]
        # Falsy entries (e.g. null) are skipped.
        return [obj_class(self, res, loaded=True) for res in data if res]

    def _update(self, url, item, response_key=None):
        # PUT only the fields that changed; no-op (returns item) when clean.
        if not item.dirty_fields:
            return item
        item = self.api.put(url, json=item.dirty_fields).json()
        # PUT requests may not return a item
        if item:
            return self.resource_class(self, item)

    def _delete(self, url):
        self.api.delete(url)
class CrudManager(base.CrudManager):
    """A CrudManager that automatically gets its base URL."""

    base_url = None

    def build_url(self, base_url=None, **kwargs):
        # Fall back to the class-level base_url when none is supplied.
        if not base_url:
            base_url = self.base_url
        return super(CrudManager, self).build_url(base_url, **kwargs)

    def get(self, **kwargs):
        filtered = self._filter_kwargs(kwargs)
        return self._get(
            self.build_url(**filtered))

    def create(self, **kwargs):
        filtered = self._filter_kwargs(kwargs)
        return self._post(
            self.build_url(**filtered), filtered)

    def update(self, **kwargs):
        filtered = self._filter_kwargs(kwargs)
        # Copy before the PUT so the payload is independent of the URL kwargs.
        payload = filtered.copy()
        return self._put(
            self.build_url(**filtered), payload)

    def findall(self, base_url=None, **kwargs):
        """Find multiple items with attributes matching ``**kwargs``.

        :param base_url: if provided, the generated URL will be appended to it
        """
        filtered = self._filter_kwargs(kwargs)
        url = self.build_url(base_url=base_url, **filtered)
        query = '?%s' % parse.urlencode(filtered) if filtered else ''
        found = self._list(url + query, self.collection_key)
        if not found:
            msg = _("No %(name)s matching %(args)s.") % {
                'name': self.resource_class.__name__,
                'args': filtered,
            }
            raise exc.HTTPNotFound(msg)
        return found
class Resource(base.Resource):
    """A resource represents a particular instance of an object.

    Resource might be tenant, user, etc.
    This is pretty much just a bag for attributes.

    :param manager: Manager object
    :param info: dictionary representing resource attributes
    :param loaded: prevent lazy-loading if set to True
    """

    key = None

    def to_dict(self):
        # Deep copy so callers cannot mutate the cached server-side state.
        return copy.deepcopy(self._info)

    @property
    def dirty_fields(self):
        # Start from the server-side snapshot and overlay every attribute
        # whose current value differs from what the server originally sent.
        pending = self.to_dict()
        for field, original in self._info.items():
            current = self.__dict__[field]
            if current != original:
                pending[field] = current
        return pending

    def update(self):
        try:
            return self.manager.update(**self.dirty_fields)
        except AttributeError:
            raise exc.NotUpdatableError(self)
| [
"gongwayne@hotmail.com"
] | gongwayne@hotmail.com |
c07e6dc436df4a4558ff006c558c77a4e4786279 | 3cd9fc36f4abba93bffb11dc43f145db6c6f5408 | /azure-iot-device/azure/iot/device/common/models/x509.py | 8f5fdb82b63978aca852862ac2b91a43558562a9 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-iot-sdk-python | 457eb035e772268559ee8fa3310c210c84e52aa6 | 5d343d5904aaa98c6a88101e0dc40263acff4db2 | refs/heads/main | 2023-09-01T05:19:57.710222 | 2023-08-28T16:52:26 | 2023-08-28T16:52:26 | 70,936,068 | 441 | 438 | MIT | 2023-08-28T16:52:28 | 2016-10-14T18:17:15 | Python | UTF-8 | Python | false | false | 1,463 | py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""This module represents a certificate that is responsible for providing client provided x509 certificates
that will eventually establish the authenticity of devices to IoTHub and Provisioning Services.
"""
class X509(object):
    """
    A class with references to the certificate, key, and optional pass-phrase used to authenticate
    a TLS connection using x509 certificates
    """

    def __init__(self, cert_file, key_file, pass_phrase=None):
        """
        Initializer for X509 Certificate

        :param cert_file: The file path to contents of the certificate (or certificate chain)
            used to authenticate the device.
        :param key_file: The file path to the key associated with the certificate
        :param pass_phrase: (optional) The pass_phrase used to encode the key file
        """
        # Stored privately; read-only access is exposed via the properties below.
        self._cert_file = cert_file
        self._key_file = key_file
        self._pass_phrase = pass_phrase

    @property
    def certificate_file(self):
        """File path of the certificate (or certificate chain)."""
        return self._cert_file

    @property
    def key_file(self):
        """File path of the key associated with the certificate."""
        return self._key_file

    @property
    def pass_phrase(self):
        """Pass-phrase used to encode the key file, or None."""
        return self._pass_phrase
| [
"noreply@github.com"
] | Azure.noreply@github.com |
3f9790b1a071e752f9653f18146064f75ac9a59f | 86d31b4e897555d67a7aed7302717c56c94bd538 | /0x08-python-more_classes/9-rectangle.py | 499f3b50d9e00af6c7990fd7618c17de0fc66ec3 | [] | no_license | jalondono/holbertonschool-higher_level_programming | bcefda6ea75d26cb44726fc74c396b1a1c22664d | a347138e3a214aa497c8a12dca702374dcd65f0d | refs/heads/master | 2020-07-22T22:53:32.929374 | 2020-02-13T20:10:08 | 2020-02-13T20:10:08 | 207,357,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,796 | py | #!/usr/bin/python3
class Rectangle:
    """Create a Rectangle class"""

    # Count of live Rectangle instances (maintained by __init__/__del__).
    number_of_instances = 0
    # Symbol used when rendering the rectangle with str().
    print_symbol = '#'

    def __init__(self, width=0, height=0):
        """Initialise a rectangle, validating both dimensions via the setters."""
        self.width = width
        self.height = height
        Rectangle.number_of_instances += 1

    @property
    def width(self):
        """int: horizontal size; must be an integer >= 0."""
        return self.__width

    @width.setter
    def width(self, value):
        if not isinstance(value, int):
            raise TypeError('width must be an integer')
        if value < 0:
            raise ValueError('width must be >= 0')
        self.__width = value

    @property
    def height(self):
        """int: vertical size; must be an integer >= 0."""
        return self.__height

    @height.setter
    def height(self, value):
        if not isinstance(value, int):
            raise TypeError('height must be an integer')
        if value < 0:
            raise ValueError('height must be >= 0')
        self.__height = value

    def area(self):
        """Return the area of the rectangle."""
        return self.__height * self.__width

    def perimeter(self):
        """Return the perimeter (0 when either dimension is 0)."""
        if self.__height == 0 or self.__width == 0:
            return 0
        return 2 * (self.__height + self.__width)

    def __str__(self):
        """Return the rectangle drawn with ``print_symbol``.

        BUG FIX: the original printed the grid as a side effect and
        returned '' — __str__ must build and *return* the string.  The
        output of ``print(rect)`` is unchanged by this fix.
        """
        if self.__width == 0 or self.__height == 0:
            return ''
        row = str(self.print_symbol) * self.__width
        return '\n'.join(row for _ in range(self.__height))

    def __repr__(self):
        """Return a string eval() could use to recreate the rectangle."""
        return "{}({}, {})".format(
            self.__class__.__name__, self.__width, self.__height)

    def __del__(self):
        """Print a farewell message and decrement the live-instance count."""
        print("Bye rectangle...")
        Rectangle.number_of_instances -= 1

    @staticmethod
    def bigger_or_equal(rect_1, rect_2):
        """Return the rectangle with the larger area (``rect_1`` on ties).

        Raises TypeError if either argument is not a Rectangle.
        """
        if not isinstance(rect_1, Rectangle):
            raise TypeError("rect_1 must be an instance of Rectangle")
        if not isinstance(rect_2, Rectangle):
            raise TypeError("rect_2 must be an instance of Rectangle")
        if rect_1.area() >= rect_2.area():
            return rect_1
        return rect_2

    @classmethod
    def square(cls, size=0):
        """Return a new square of side ``size``.

        BUG FIX: uses ``cls`` instead of hard-coding Rectangle so that
        subclasses get instances of their own type.
        """
        return cls(size, size)
| [
"juanlondono151776@hotmail.com"
] | juanlondono151776@hotmail.com |
ac8be09ebd049d3bd03c35dfc7fc5a611fa2221a | 54bd004dd18f23b46fd75288823977a93d6c7c9d | /Python_basics/if_else_break.py | 21716c863210d80f7d34277f758adc8226939154 | [] | no_license | Gagangithub1988/Python | 13f914a200f6f4750c1b7da1467ca7e3f48814d0 | 8c9ba1902ac45841fd3145d49b08547420f15f2d | refs/heads/master | 2022-11-03T22:12:51.799829 | 2020-06-20T06:46:45 | 2020-06-20T06:46:45 | 273,642,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | numbers = [386, 462, 47, 418, 907, 344, 236, 375, 823, 566, 597, 978, 328, 615, 953, 345,399, 162, 758, 219, 918, 237, 412, 566, 826, 248, 866, 950, 626, 949, 687, 217, 815, 67, 104, 58, 512, 24, 892, 894, 767, 553, 81, 379, 843, 831, 445, 742, 717, 958,743, 527]
# Walk the list, printing even numbers, until 237 is reached;
# 237 itself is printed and the scan stops there.
for i in numbers:
    if i == 237:
        print(i)
        break
    if i % 2 == 0:
        print(i)
"noreply@github.com"
] | Gagangithub1988.noreply@github.com |
e9594ce23320791e0e9c6e4bd04e3cac7d201145 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_to_M_Gk3_no_pad/pyr_Tcrop255_pad60_jit15/Sob_k25_s001/pyr_2s/L3/step10_a.py | b59eafb0fca5bd0552df3fe049cb1ddd301c3516 | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,228 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__) ### absolute path of the step10 script being executed
code_exe_path_element = code_exe_path.split("\\") ### split the path (Windows-style separators); used below to find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### directory depth of kong_model2 within the path
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### reconstruct the kong_model2 directory from the path elements
import sys ### add kong_model2 (and this script's own dir) to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### the -1 converts a length into an index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] would strip the "step1x_" prefix; [0:] keeps it, since meaningful names are fine unstripped
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] would strip the "mask_" prefix (added only because python module names cannot start with a digit); [0:] keeps it, since the automatic ordering is acceptable
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print(" template_dir:", template_dir) ### e.g. template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
exp_dir = template_dir  ### parent folder name under which every result_dir of this config lives
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_2side_L3 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
### Undo this script's sys.path additions and evict cached step09 modules,
### so sibling configs can import their own step09 variants afterwards.
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
'''
exp_dir 是 決定 result_dir 的 "上一層"資料夾 名字喔! exp_dir要巢狀也沒問題~
比如:exp_dir = "6_mask_unet/自己命的名字",那 result_dir 就都在:
6_mask_unet/自己命的名字/result_a
6_mask_unet/自己命的名字/result_b
6_mask_unet/自己命的名字/...
'''
use_db_obj = type8_blender_kong_doc3d_in_I_gt_MC
use_loss_obj = [G_sobel_k25_loss_info_builder.set_loss_target("UNet_Mask").copy()] ### the z, y, x ordering follows step07_b_0b_Multi_UNet
#############################################################
### Build an "empty" Exp_builder so result_analyze can draw blank placeholder figures.
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
### One Exp_builder per pyramid model variant from step09_2side_L3; every row
### shares identical training settings and differs only in the 1side/2side model.
ch032_1side_1__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
#############################################################
if(__name__ == "__main__"):
    # NOTE(review): `time` / `start_time` are expected to come from the
    # star-imports above — confirm they are exported there.
    print("build exps cost time:", time.time() - start_time)
    if len(sys.argv) < 2:
        ############################################################################################################
        ### Run directly (F5, or `python step10_b1_exp_obj_load_and_train_and_test.py` with no
        ### extra argument): nothing was passed by step10_b_subprocess.py, so just train the
        ### default experiment instead of falling through to the dispatch line below.
        ch032_1side_1__2side_1.build().run()
        # print('no argument')
        sys.exit()

    ### The line below serves step10_b_subprocess.py — equivalent to running
    ### `python step10_b1_exp_obj_load_and_train_and_test.py "<some_exp>.build().run()"` from cmd.
    ### NOTE: eval of a CLI argument — acceptable only because this is trusted internal tooling.
    eval(sys.argv[1])
| [
"s89334roy@yahoo.com.tw"
] | s89334roy@yahoo.com.tw |
ecd12e83553b2c3605eb45cc536613b3ad79f4e8 | a450d455fc1da6f3a89eebb562cc2fb28784b129 | /games/views.py | 7edd5b3cae8a794ad0217bb7a542db339aa24716 | [
"MIT"
] | permissive | hawkthorne/bearweb | 6c62e0143ab6a19bee6cf340dfec81664f201dcb | 1533acd9c7610d9ea01e8413853cca70843b9d63 | refs/heads/master | 2021-05-28T05:54:55.533462 | 2014-03-16T23:12:01 | 2014-03-16T23:12:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,156 | py | from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.core.urlresolvers import reverse
from django.utils.cache import patch_response_headers
from django.template.defaultfilters import slugify
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.template.loader import render_to_string
from django.views.decorators.http import require_safe
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, FormView, UpdateView
from django.conf import settings
from braces.views import LoginRequiredMixin
from .models import Game, Release, CrashReport
from .forms import LoveForm, GameForm, UpdateGameForm
from games import bundle
from games import tasks
def get_game(request, uuid):
    """Return the Game with ``uuid``; 404 unless the requester owns it."""
    game = get_object_or_404(Game, uuid=uuid)
    if game.owner == request.user:
        return game
    # Pretend the game does not exist for anyone but its owner.
    raise Http404
class UUIDMixin(object):
    """Make detail views resolve objects by their ``uuid`` field instead of pk."""
    slug_field = 'uuid'
    slug_url_kwarg = 'uuid'
class IdenticonDetail(UUIDMixin, DetailView):
    """Serve a game's identicon as an aggressively cached PNG."""
    model = Game
    context_object_name = 'game'

    def render_to_response(self, context, **kwargs):
        response = HttpResponse(content_type="image/png")
        context['game'].identicon(56).save(response, 'PNG')
        # Cache for one year (31536000 seconds).
        patch_response_headers(response, cache_timeout=31536000)
        return response
class GameDetail(UUIDMixin, LoginRequiredMixin, DetailView):
    """Owner-only dashboard for a single game."""
    model = Game
    context_object_name = 'game'

    def get_context_data(self, **kwargs):
        context = super(GameDetail, self).get_context_data(**kwargs)
        game = context['game']
        # Only the owner may view the dashboard; 404 for everyone else.
        if game.owner != self.request.user:
            raise Http404
        context['KEEN_PROJECT_ID'] = settings.KEEN_PROJECT_ID
        context['KEEN_READ_KEY'] = settings.KEEN_READ_KEY
        context['releases'] = game.release_set.order_by('-created')[:10]
        context['crash_reports'] = game.crashreport_set.order_by('-created')[:5]
        return context
class ReportList(LoginRequiredMixin, ListView):
    """All crash reports for one of the requesting user's games, newest first."""
    model = CrashReport
    context_object_name = 'crash_reports'

    def get_queryset(self):
        # get_game 404s unless request.user owns the game; stash it for
        # get_context_data below.
        self.game = get_game(self.request, self.kwargs['uuid'])
        return self.game.crashreport_set.order_by('-created')

    def get_context_data(self, **kwargs):
        ctx = super(ReportList, self).get_context_data(**kwargs)
        ctx['game'] = self.game
        return ctx
class ReleaseList(LoginRequiredMixin, ListView):
    """All releases for one of the requesting user's games, newest first."""
    model = Release
    context_object_name = 'releases'

    def get_queryset(self):
        # get_game 404s unless request.user owns the game.
        self.game = get_game(self.request, self.kwargs['uuid'])
        return Release.objects.filter(game=self.game).order_by('-created')

    def get_context_data(self, **kwargs):
        ctx = super(ReleaseList, self).get_context_data(**kwargs)
        ctx['game'] = self.game
        ctx['show_love_version'] = True
        return ctx
class ReleaseCreate(LoginRequiredMixin, FormView):
    """Upload a .love file and kick off packaging of a new release."""
    template_name = 'games/release_form.html'
    form_class = LoveForm

    def get_success_url(self):
        return reverse('games:releases', kwargs={'uuid': self.kwargs['uuid']})

    def get_context_data(self, **kwargs):
        context = super(ReleaseCreate, self).get_context_data(**kwargs)
        # get_game 404s unless request.user owns the game.
        context['game'] = get_game(self.request, self.kwargs['uuid'])
        return context

    def form_valid(self, form):
        game = get_game(self.request, self.kwargs['uuid'])
        f = form.cleaned_data['lovefile']
        version = form.cleaned_data['version']
        # Reject version strings the game considers invalid; the error
        # partial is returned as the 400 body for inline display.
        if not game.valid_version(version):
            errors = {
                'invalid_version': version,
                'game': game,
            }
            partial = render_to_string('games/upload_errors.html', errors)
            return HttpResponseBadRequest(partial)
        # A .love bundle must contain a main.lua (checked by bundle) to be runnable.
        if not bundle.check_for_main(f):
            errors = {'invalid_file': True}
            partial = render_to_string('games/upload_errors.html', errors)
            return HttpResponseBadRequest(partial)
        # Fall back to LOVE 0.8.0 when the version cannot be detected.
        love_version = bundle.detect_version(f) or "0.8.0"
        # Create the release row for the uploaded version.
        release = game.release_set.create(version=version,
                                          love_version=love_version)
        # FIXME: Abstract this away
        f.name = "{}-original-{}.love".format(game.slug, version)
        release.add_asset(f, tag='uploaded')
        # Platform packaging happens asynchronously in a task.
        tasks.lovepackage.delay(release.pk)
        return super(ReleaseCreate, self).form_valid(form)
class GameCreate(LoginRequiredMixin, CreateView):
    """Create a new game owned by the requesting user."""
    model = Game
    form_class = GameForm

    def form_valid(self, form):
        # The creating user becomes the owner; the slug tracks the name.
        form.instance.owner = self.request.user
        form.instance.slug = slugify(form.instance.name)
        response = super(GameCreate, self).form_valid(form)
        # Regenerate the game's icns asynchronously once it is saved.
        tasks.update_icns.delay(form.instance.pk)
        return response
class GameUpdate(LoginRequiredMixin, UpdateView):
    """Edit one of the requesting user's games."""
    model = Game
    template_name_suffix = '_update_form'
    context_object_name = 'game'
    form_class = UpdateGameForm

    def get_success_url(self):
        return reverse('games:edit', kwargs={'uuid': self.kwargs['uuid']})

    def get_object(self, queryset=None):
        # Ownership is enforced inside get_game (404 for non-owners).
        return get_game(self.request, self.kwargs['uuid'])

    def form_valid(self, form):
        # Keep the slug in sync with a possibly renamed game.
        form.instance.slug = slugify(form.instance.name)
        response = super(GameUpdate, self).form_valid(form)
        tasks.update_icns.delay(form.instance.pk)
        return response
@require_safe
def download(request, uuid, platform):
    """
    Redirect to the download URL for the latest release of a game.

    Responds with a 404 when the game is private, the platform is unknown,
    no release exists yet, or the release has no URL for that platform.
    """
    game = get_object_or_404(Game, uuid=uuid)

    if not game.public:
        raise Http404

    # Map each supported platform to the Release method that yields its URL.
    url_attr_by_platform = {
        'windows': 'windows_url',
        'osx': 'osx_url',
        'love': 'love_url',
    }

    if platform not in url_attr_by_platform:
        raise Http404

    try:
        release = game.latest_release()
    except IndexError:
        # No releases have been uploaded yet.
        raise Http404

    url = getattr(release, url_attr_by_platform[platform])()

    if not url:
        raise Http404

    return redirect(url)
| [
"kyle@kyleconroy.com"
] | kyle@kyleconroy.com |
e079875a9982f729d64b8fcf715d00aa9d82fb99 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/toastdriven-restless/allPythonContent.py | 1e9383e427ae70bd92da9d2ee4ed3ef88e1e062a | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93,916 | py | __FILENAME__ = conf
# -*- coding: utf-8 -*-
#
# restless documentation build configuration file, created by
# sphinx-quickstart on Sat Jan 11 01:04:55 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'restless'
copyright = u'2014, Daniel Lindsley'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.0.1'
# The full version, including alpha/beta/rc tags.
release = '2.0.1-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'restlessdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'restless.tex', u'restless Documentation',
   u'Daniel Lindsley', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'restless', u'restless Documentation',
     [u'Daniel Lindsley'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'restless', u'restless Documentation',
   u'Daniel Lindsley', 'restless', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Blerg. The autodocs for the Django module freak out if this isn't done.
# NOTE(review): autodoc imports restless.dj, which presumably touches
# ``django.conf.settings`` at import time — a minimal configuration here
# keeps the docs build from raising ImproperlyConfigured. Confirm if the
# Django integration changes.
from django.conf import settings
settings.configure()
########NEW FILE########
__FILENAME__ = api
from django.contrib.auth.models import User
from restless.dj import DjangoResource
from restless.preparers import FieldsPreparer
from posts.models import Post
class PostResource(DjangoResource):
    # Maps client-facing field names to dotted lookups on ``Post`` instances.
    preparer = FieldsPreparer(fields={
        'id': 'id',
        'title': 'title',
        'author': 'user.username',
        'body': 'content',
        'posted_on': 'posted_on',
    })

    def list(self):
        """Return every post (GET on the list endpoint)."""
        return Post.objects.all()

    def detail(self, pk):
        """Return a single post by primary key (GET on the detail endpoint)."""
        return Post.objects.get(id=pk)

    def create(self):
        """Create a new post from the deserialized request body (POST)."""
        author = User.objects.get(username=self.data['author'])
        return Post.objects.create(
            title=self.data['title'],
            user=author,
            content=self.data['body']
        )

    def update(self, pk):
        """Update the post with the given pk (PUT) — upserts if missing."""
        try:
            post = Post.objects.get(id=pk)
        except Post.DoesNotExist:
            # A PUT to a nonexistent pk creates a fresh post instead.
            post = Post()

        post.title = self.data['title']
        post.user = User.objects.get(username=self.data['author'])
        post.content = self.data['body']
        post.save()
        return post

    def delete(self, pk):
        """Remove the post with the given pk (DELETE)."""
        Post.objects.get(id=pk).delete()
########NEW FILE########
__FILENAME__ = models
from django.contrib.auth.models import User
from django.db import models
class Post(models.Model):
    # Author of the post; reverse accessor is ``user.posts``.
    user = models.ForeignKey(User, related_name='posts')
    title = models.CharField(max_length=128)
    # Optional URL slug; may be left blank.
    slug = models.SlugField(blank=True)
    content = models.TextField(default='', blank=True)
    # Set once when the row is first inserted.
    posted_on = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save.
    updated_on = models.DateTimeField(auto_now=True)

    class Meta(object):
        # Newest first, ties broken alphabetically by title.
        ordering = ['-posted_on', 'title']

    def __str__(self):
        return self.title
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url, include
from .api import PostResource
urlpatterns = patterns('',
    # ``PostResource.urls()`` mounts both the list & detail endpoints
    # (named ``api_post_list`` / ``api_post_detail``) under this prefix.
    url(r'^posts/', include(PostResource.urls())),

    # Alternatively, if you don't like the defaults...
    # url(r'^posts/$', PostResource.as_list(), name='api_posts_list'),
    # url(r'^posts/(?P<pk>\d+)/$', PostResource.as_detail(), name='api_posts_detail'),
)
########NEW FILE########
__FILENAME__ = app
from flask import Flask
import redis
from restless.fl import FlaskResource
import time
app = Flask(__name__)
class UserResource(FlaskResource):
    """Example Flask resource backed by a Redis hash per user."""
    def __init__(self, *args, **kwargs):
        super(UserResource, self).__init__(*args, **kwargs)
        # One Redis connection per resource instance (i.e. per request).
        self.conn = redis.StrictRedis(host='localhost', port=6379, db=0)

    def is_authenticated(self):
        # Demo app: everyone is allowed.
        return True

    def make_user_key(self, username):
        # Redis key for the hash holding a single user's fields.
        return 'user_{0}'.format(username)

    def list(self):
        # Expand each stored username (indexes 0-100 of the ``users``
        # list) into its full hash.
        usernames = self.conn.lrange('users', 0, 100)
        return [
            self.conn.hgetall(self.make_user_key(username))
            for username in usernames
        ]

    def detail(self, username):
        return self.conn.hgetall(self.make_user_key(username))

    def create(self):
        user_key = self.make_user_key(self.data['username'])
        record = {
            'username': self.data['username'],
            'email': self.data['email'],
            'added_on': int(time.time()),
        }
        self.conn.hmset(user_key, record)
        # Track the username in the master list so ``list`` can find it.
        self.conn.rpush('users', self.data['username'])
        return self.conn.hgetall(user_key)
# Register the list & detail routes under /api/users/.
UserResource.add_url_rules(app, rule_prefix='/api/users/')

# Alternatively, if you don't like the defaults...
# app.add_url_rule('/api/users/', endpoint='api_users_list', view_func=UserResource.as_list(), methods=['GET', 'POST', 'PUT', 'DELETE'])
# app.add_url_rule('/api/users/<username>/', endpoint='api_users_detail', view_func=UserResource.as_detail(), methods=['GET', 'POST', 'PUT', 'DELETE'])

if __name__ == '__main__':
    # Development server with debug mode enabled — not for production.
    app.debug = True
    app.run()
########NEW FILE########
__FILENAME__ = app
import time
import redis
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from restless.pyr import PyramidResource
class UserResource(PyramidResource):
    """Example Pyramid resource backed by a Redis hash per user."""
    def __init__(self, *args, **kwargs):
        super(UserResource, self).__init__(*args, **kwargs)
        # Each resource instance gets its own Redis connection.
        self.conn = redis.StrictRedis(host='localhost', port=6379, db=0)

    def is_authenticated(self):
        # Example app: no real authentication.
        return True

    def make_user_key(self, username):
        # Key under which a user's hash is stored in Redis.
        return 'user_{0}'.format(username)

    def list(self):
        # Usernames at indexes 0-100, each expanded to its stored hash.
        stored_names = self.conn.lrange('users', 0, 100)
        return [
            self.conn.hgetall(self.make_user_key(stored_name))
            for stored_name in stored_names
        ]

    def detail(self, username):
        return self.conn.hgetall(self.make_user_key(username))

    def create(self):
        redis_key = self.make_user_key(self.data['username'])
        fields = {
            'username': self.data['username'],
            'email': self.data['email'],
            'added_on': int(time.time()),
        }
        self.conn.hmset(redis_key, fields)
        # Register the username so ``list`` can page over it later.
        self.conn.rpush('users', self.data['username'])
        return self.conn.hgetall(redis_key)
if __name__ == '__main__':
    # Wire the resource's list & detail views under /users/ and serve
    # the WSGI app on all interfaces at port 8080.
    config = Configurator()
    config = UserResource.add_views(config, '/users/')
    app = config.make_wsgi_app()
    server = make_server('0.0.0.0', 8080, app)
    server.serve_forever()
########NEW FILE########
__FILENAME__ = constants
# HTTP Status Codes
# 2xx: success.
OK = 200
CREATED = 201
ACCEPTED = 202
NO_CONTENT = 204
# 4xx: client errors.
BAD_REQUEST = 400
UNAUTHORIZED = 401
NOT_FOUND = 404
METHOD_NOT_ALLOWED = 405
# 5xx: server errors.
APPLICATION_ERROR = 500
METHOD_NOT_IMPLEMENTED = 501
########NEW FILE########
__FILENAME__ = data
class Data(object):
    """
    A container object that carries meta information about the data.

    ``value`` should be the data to be returned to the client. This may
    be post-processed.

    ``should_prepare`` determines whether additional post-processing
    should occur & should be boolean. This is useful when returning objects
    or with complex requirements. Default is ``True``.

    ``prepare_with`` is reserved for future use in specifying a custom
    callable. Default is ``None`` (no custom callable).
    """
    def __init__(self, value, should_prepare=True, prepare_with=None):
        self.value = value
        self.should_prepare = should_prepare
        self.prepare_with = prepare_with
########NEW FILE########
__FILENAME__ = dj
import six
from django.conf import settings
from django.conf.urls import patterns, url
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, Http404
from django.views.decorators.csrf import csrf_exempt
from .exceptions import NotFound
from .resources import Resource
class DjangoResource(Resource):
    """
    A Django-specific ``Resource`` subclass.

    Doesn't require any special configuration, but helps when working in a
    Django environment.
    """
    # Because Django.
    @classmethod
    def as_list(cls, *args, **kwargs):
        # CSRF protection is disabled on the API endpoints so that
        # non-browser clients can POST/PUT/DELETE without a CSRF token.
        # (``cls`` — not ``self`` — is the idiomatic name for the implicit
        # first argument of a classmethod.)
        return csrf_exempt(super(DjangoResource, cls).as_list(*args, **kwargs))

    @classmethod
    def as_detail(cls, *args, **kwargs):
        # Same CSRF exemption as ``as_list``, for the detail endpoint.
        return csrf_exempt(super(DjangoResource, cls).as_detail(*args, **kwargs))

    def is_debug(self):
        # By default, Django-esque: mirror the project's ``DEBUG`` setting.
        return settings.DEBUG

    def build_response(self, data, status=200):
        # By default, Django-esque.
        resp = HttpResponse(data, content_type='application/json')
        resp.status_code = status
        return resp

    def build_error(self, err):
        # A bit nicer behavior surrounding things that don't exist:
        # translate Django's "missing object" exceptions into a 404
        # instead of a generic application error.
        if isinstance(err, (ObjectDoesNotExist, Http404)):
            err = NotFound(msg=six.text_type(err))

        return super(DjangoResource, self).build_error(err)

    @classmethod
    def build_url_name(cls, name, name_prefix=None):
        """
        Given a ``name`` & an optional ``name_prefix``, this generates a name
        for a URL.

        :param name: The name for the URL (ex. 'detail')
        :type name: string

        :param name_prefix: (Optional) A prefix for the URL's name (for
            resolving). The default is ``None``, which will autocreate a prefix
            based on the class name. Ex: ``BlogPostResource`` ->
            ``api_blogpost_list``
        :type name_prefix: string

        :returns: The final name
        :rtype: string
        """
        if name_prefix is None:
            name_prefix = 'api_{0}'.format(
                cls.__name__.replace('Resource', '').lower()
            )

        # Normalize so a user-supplied trailing underscore doesn't double up.
        name_prefix = name_prefix.rstrip('_')
        return '_'.join([name_prefix, name])

    @classmethod
    def urls(cls, name_prefix=None):
        """
        A convenience method for hooking up the URLs.

        This automatically adds a list & a detail endpoint to your URLconf.

        :param name_prefix: (Optional) A prefix for the URL's name (for
            resolving). The default is ``None``, which will autocreate a prefix
            based on the class name. Ex: ``BlogPostResource`` ->
            ``api_blogpost_list``
        :type name_prefix: string

        :returns: A ``patterns`` object for ``include(...)``
        """
        return patterns('',
            url(r'^$', cls.as_list(), name=cls.build_url_name('list', name_prefix)),
            url(r'^(?P<pk>\d+)/$', cls.as_detail(), name=cls.build_url_name('detail', name_prefix)),
        )
########NEW FILE########
__FILENAME__ = exceptions
from .constants import APPLICATION_ERROR, UNAUTHORIZED, NOT_FOUND, BAD_REQUEST
from .constants import METHOD_NOT_ALLOWED, METHOD_NOT_IMPLEMENTED
class RestlessError(Exception):
    """
    A common base exception from which all other exceptions in ``restless``
    inherit.

    No special attributes or behaviors.
    """
    pass
class HttpError(RestlessError):
    """
    The foundational HTTP-related error.

    All other HTTP errors in ``restless`` inherit from this one.

    Has a ``status`` attribute. If present, ``restless`` will use this as the
    ``status_code`` in the response.

    Has a ``msg`` attribute. Has a reasonable default message (override-able
    from the constructor).
    """
    status = APPLICATION_ERROR
    msg = "Application Error"

    def __init__(self, msg=None):
        # Fall back to the class-level default message when none is given.
        final_msg = msg if msg else type(self).msg
        super(HttpError, self).__init__(final_msg)
class BadRequest(HttpError):
    # 400: the request was malformed or invalid.
    status = BAD_REQUEST
    msg = "Bad request."


class Unauthorized(HttpError):
    # 401: the request lacks valid authentication.
    status = UNAUTHORIZED
    msg = "Unauthorized."


class NotFound(HttpError):
    # 404: the requested resource does not exist.
    status = NOT_FOUND
    msg = "Resource not found."


class MethodNotAllowed(HttpError):
    # 405: the endpoint exists, but not for this HTTP method.
    status = METHOD_NOT_ALLOWED
    msg = "The specified HTTP method is not allowed."


class MethodNotImplemented(HttpError):
    # 501: the HTTP method maps to a handler the resource hasn't implemented.
    status = METHOD_NOT_IMPLEMENTED
    msg = "The specified HTTP method is not implemented."
########NEW FILE########
__FILENAME__ = fl
from flask import make_response
from flask import request
from .resources import Resource
class FlaskResource(Resource):
    """
    A Flask-specific ``Resource`` subclass.

    Doesn't require any special configuration, but helps when working in a
    Flask environment.
    """
    @classmethod
    def as_list(cls, *init_args, **init_kwargs):
        # Flask exposes a global ``request`` proxy instead of handing the
        # request to each view, so pick it up here rather than as an arg.
        def _list_view(*args, **kwargs):
            # A fresh instance per request keeps per-request state isolated.
            resource = cls(*init_args, **init_kwargs)
            resource.request = request
            return resource.handle('list', *args, **kwargs)

        return _list_view

    @classmethod
    def as_detail(cls, *init_args, **init_kwargs):
        # Same global-``request`` dance as ``as_list``, for detail views.
        def _detail_view(*args, **kwargs):
            resource = cls(*init_args, **init_kwargs)
            resource.request = request
            return resource.handle('detail', *args, **kwargs)

        return _detail_view

    def request_body(self):
        # Raw body bytes of the incoming request, as provided by Flask.
        return self.request.data

    def is_debug(self):
        # Imported lazily so the module can load without an app context.
        from flask import current_app
        return current_app.debug

    def build_response(self, data, status=200):
        headers = {
            'Content-Type': 'application/json'
        }
        return make_response(data, status, headers)

    @classmethod
    def build_endpoint_name(cls, name, endpoint_prefix=None):
        """
        Generates an endpoint name for a URL from ``name`` & an optional
        ``endpoint_prefix``.

        :param name: The name for the URL (ex. 'detail')
        :type name: string

        :param endpoint_prefix: (Optional) A prefix for the endpoint's name.
            The default is ``None``, which derives a prefix from the class
            name. Ex: ``BlogPostResource`` -> ``api_blogpost_detail``
        :type endpoint_prefix: string

        :returns: The final name
        :rtype: string
        """
        if endpoint_prefix is None:
            stripped = cls.__name__.replace('Resource', '').lower()
            endpoint_prefix = 'api_{0}'.format(stripped)

        # Strip any trailing underscore so joining doesn't double it up.
        return '_'.join([endpoint_prefix.rstrip('_'), name])

    @classmethod
    def add_url_rules(cls, app, rule_prefix, endpoint_prefix=None):
        """
        A convenience method for hooking up the URLs.

        This automatically adds a list & a detail endpoint to your routes.

        :param app: The ``Flask`` object for your app.
        :type app: ``flask.Flask``

        :param rule_prefix: The start of the URL to handle.
        :type rule_prefix: string

        :param endpoint_prefix: (Optional) A prefix for the URL's name (for
            endpoints). The default is ``None``, which derives a prefix from
            the class name. Ex: ``BlogPostResource`` -> ``api_blogpost_list``
        :type endpoint_prefix: string

        :returns: Nothing
        """
        allowed_methods = ['GET', 'POST', 'PUT', 'DELETE']

        app.add_url_rule(
            rule_prefix,
            endpoint=cls.build_endpoint_name('list', endpoint_prefix),
            view_func=cls.as_list(),
            methods=allowed_methods
        )
        app.add_url_rule(
            rule_prefix + '<pk>/',
            endpoint=cls.build_endpoint_name('detail', endpoint_prefix),
            view_func=cls.as_detail(),
            methods=allowed_methods
        )
########NEW FILE########
__FILENAME__ = it
import re
import itty
from restless.resources import Resource
class IttyResource(Resource):
    """
    An Itty-specific ``Resource`` subclass.

    Doesn't require any special configuration, but helps when working in an
    Itty environment.
    """
    # Itty has no global debug flag, so expose one on the resource itself.
    debug = False

    def is_debug(self):
        return self.debug

    def build_response(self, data, status=200):
        return itty.Response(data, status=status, content_type='application/json')

    @classmethod
    def setup_urls(cls, rule_prefix):
        """
        A convenience method for hooking up the URLs.

        This automatically adds a list & a detail endpoint to your request
        mappings.

        :returns: ``None``
        """
        # ``add_slash`` already returns the normalized URL string; no extra
        # formatting is needed.
        list_url = itty.add_slash(rule_prefix)
        # Raw string: ``\d`` is a regex escape, which is an invalid (and
        # deprecated) escape sequence in a plain Python string literal.
        detail_url = itty.add_slash(rule_prefix + r"/(?P<pk>\d+)")
        list_re = re.compile("^%s$" % list_url)
        detail_re = re.compile("^%s$" % detail_url)

        for method in ('GET', 'POST', 'PUT', 'DELETE'):
            itty.REQUEST_MAPPINGS[method].append((list_re, list_url, cls.as_list()))
            itty.REQUEST_MAPPINGS[method].append((detail_re, detail_url, cls.as_detail()))
########NEW FILE########
__FILENAME__ = preparers
class Preparer(object):
    """
    The no-op preparation strategy: data passes through untouched.

    Also serves as the protocol subclasses should implement to work with
    Restless.
    """
    def __init__(self):
        super(Preparer, self).__init__()

    def prepare(self, data):
        """
        Transforms the data for serialization.

        The base implementation returns the data unchanged.
        """
        return data


class FieldsPreparer(Preparer):
    """
    Prepares data by extracting a fixed set of fields from it.

    ``fields`` should be a dictionary mapping keys (fieldnames exposed to
    the user) to values (dotted lookup paths to the desired attribute/key
    on the object).

    Example::

        preparer = FieldsPreparer(fields={
            # ``user`` is the key the client will see.
            # ``author.pk`` is the dotted path lookup ``FieldsPreparer``
            # will traverse on the data to return a value.
            'user': 'author.pk',
        })
    """
    def __init__(self, fields):
        super(FieldsPreparer, self).__init__()
        self.fields = fields

    def prepare(self, data):
        """
        Extracts the configured fields from ``data``.

        Traverses dotted paths via ``lookup_data`` & returns a dictionary
        of the results. If no fields were configured, the data is returned
        unchanged.
        """
        if not self.fields:
            # No fields specified. Serialize everything.
            return data

        return {
            fieldname: self.lookup_data(lookup, data)
            for fieldname, lookup in self.fields.items()
        }

    def lookup_data(self, lookup, data):
        """
        Descends through nested data following a (possibly dotted) lookup
        string & returns the value found.

        Works with dictionary-alikes, objects, or any combination of the
        two. A dotted ``lookup`` is traversed one segment at a time; a
        plain name is fetched directly as a key or attribute. An empty
        lookup returns ``data`` unchanged.

        Example::

            >>> data = {
            ...     'type': 'message',
            ...     'greeting': {
            ...         'en': 'hello',
            ...         'fr': 'bonjour',
            ...         'es': 'hola',
            ...     },
            ...     'person': Person(
            ...         name='daniel'
            ...     )
            ... }
            >>> lookup_data('type', data)
            'message'
            >>> lookup_data('greeting.en', data)
            'hello'
            >>> lookup_data('person.name', data)
            'daniel'
        """
        part, _, remainder = lookup.partition('.')

        if not part:
            return data

        if hasattr(data, 'keys') and hasattr(data, '__getitem__'):
            # Quacks like a dictionary.
            found = data[part]
        else:
            # Treat it as an object.
            found = getattr(data, part)

        if not remainder:
            return found

        # More path segments remain, so recurse into the value just found.
        return self.lookup_data(remainder, found)
########NEW FILE########
__FILENAME__ = pyr
from pyramid.response import Response
from .resources import Resource
class PyramidResource(Resource):
    """
    A Pyramid-specific ``Resource`` subclass.

    Doesn't require any special configuration, but helps when working in a
    Pyramid environment.
    """
    @classmethod
    def as_list(cls, *args, **kwargs):
        # Pass-through override: list views need no Pyramid-specific
        # handling, so defer entirely to the base implementation.
        return super(PyramidResource, cls).as_list(*args, **kwargs)

    @classmethod
    def as_detail(cls, *init_args, **init_kwargs):
        # Pyramid views receive only ``request``; the URL parameter
        # arrives via ``request.matchdict`` rather than as a view arg.
        def _wrapper(request):
            # Make a new instance so that no state potentially leaks between
            # instances.
            inst = cls(*init_args, **init_kwargs)
            inst.request = request
            # ``add_views`` registers the detail route with a ``{name}``
            # placeholder, so that's the key to read here.
            name = request.matchdict['name']
            return inst.handle('detail', name)
        return _wrapper

    def build_response(self, data, status=200):
        # Wrap the serialized payload in a Pyramid ``Response``.
        resp = Response(data, status_code=status, content_type="application/json")
        return resp

    @classmethod
    def build_routename(cls, name, routename_prefix=None):
        """
        Given a ``name`` & an optional ``routename_prefix``, this generates a
        name for a URL.

        :param name: The name for the URL (ex. 'detail')
        :type name: string

        :param routename_prefix: (Optional) A prefix for the URL's name (for
            resolving). The default is ``None``, which will autocreate a prefix
            based on the class name. Ex: ``BlogPostResource`` ->
            ``api_blogpost_list``
        :type routename_prefix: string

        :returns: The final name
        :rtype: string
        """
        if routename_prefix is None:
            routename_prefix = 'api_{0}'.format(
                cls.__name__.replace('Resource', '').lower()
            )

        # Normalize so a user-supplied trailing underscore doesn't double up.
        routename_prefix = routename_prefix.rstrip('_')
        return '_'.join([routename_prefix, name])

    @classmethod
    def add_views(cls, config, rule_prefix, routename_prefix=None):
        """
        A convenience method for registering the routes and views in pyramid.

        This automatically adds a list and detail endpoint to your routes.

        :param config: The pyramid ``Configurator`` object for your app.
        :type config: ``pyramid.config.Configurator``

        :param rule_prefix: The start of the URL to handle.
        :type rule_prefix: string

        :param routename_prefix: (Optional) A prefix for the route's name.
            The default is ``None``, which will autocreate a prefix based on the
            class name. Ex: ``PostResource`` -> ``api_post_list``
        :type routename_prefix: string

        :returns: ``pyramid.config.Configurator``
        """
        methods = ('GET', 'POST', 'PUT', 'DELETE')

        config.add_route(
            cls.build_routename('list', routename_prefix),
            rule_prefix
        )
        config.add_view(
            cls.as_list(),
            route_name=cls.build_routename('list', routename_prefix),
            request_method=methods
        )
        # The detail route carries a ``{name}`` placeholder, which
        # ``as_detail`` reads back out of ``request.matchdict``.
        config.add_route(
            cls.build_routename('detail', routename_prefix),
            rule_prefix + '{name}/'
        )
        config.add_view(
            cls.as_detail(),
            route_name=cls.build_routename('detail', routename_prefix),
            request_method=methods
        )
        return config
########NEW FILE########
__FILENAME__ = resources
import six
import sys
from .constants import OK, CREATED, ACCEPTED, NO_CONTENT
from .data import Data
from .exceptions import MethodNotImplemented, Unauthorized
from .preparers import Preparer, FieldsPreparer
from .serializers import JSONSerializer
from .utils import format_traceback
def skip_prepare(func):
    """
    A convenience decorator for indicating the raw data should not be prepared.

    The decorated method's return value is wrapped in a ``Data`` container
    with ``should_prepare=False``, so the resource serializes it as-is
    instead of running it through the preparer.
    """
    from functools import wraps

    # ``wraps`` preserves the decorated method's name/docstring, which the
    # original implementation discarded.
    @wraps(func)
    def _wrapper(self, *args, **kwargs):
        value = func(self, *args, **kwargs)
        return Data(value, should_prepare=False)
    return _wrapper
class Resource(object):
"""
Defines a RESTful resource.
Users are expected to subclass this object & implement a handful of methods:
* ``list``
* ``detail``
* ``create`` (requires authentication)
* ``update`` (requires authentication)
* ``delete`` (requires authentication)
Additionally, the user may choose to implement:
* ``create_detail`` (requires authentication)
* ``update_list`` (requires authentication)
* ``delete_list`` (requires authentication)
Users may also wish to define a ``fields`` attribute on the class. By
providing a dictionary of output names mapped to a dotted lookup path, you
can control the serialized output.
Users may also choose to override the ``status_map`` and/or ``http_methods``
on the class. These respectively control the HTTP status codes returned by
the views and the way views are looked up (based on HTTP method & endpoint).
"""
status_map = {
'list': OK,
'detail': OK,
'create': CREATED,
'update': ACCEPTED,
'delete': NO_CONTENT,
'update_list': ACCEPTED,
'create_detail': CREATED,
'delete_list': NO_CONTENT,
}
http_methods = {
'list': {
'GET': 'list',
'POST': 'create',
'PUT': 'update_list',
'DELETE': 'delete_list',
},
'detail': {
'GET': 'detail',
'POST': 'create_detail',
'PUT': 'update',
'DELETE': 'delete',
}
}
preparer = Preparer()
serializer = JSONSerializer()
def __init__(self, *args, **kwargs):
self.init_args = args
self.init_kwargs = kwargs
self.request = None
self.data = None
self.status = 200
@classmethod
def as_list(cls, *init_args, **init_kwargs):
"""
Used for hooking up the actual list-style endpoints, this returns a
wrapper function that creates a new instance of the resource class &
calls the correct view method for it.
:param init_args: (Optional) Positional params to be persisted along
for instantiating the class itself.
:param init_kwargs: (Optional) Keyword params to be persisted along
for instantiating the class itself.
:returns: View function
"""
return cls.as_view('list', *init_args, **init_kwargs)
@classmethod
def as_detail(cls, *init_args, **init_kwargs):
"""
Used for hooking up the actual detail-style endpoints, this returns a
wrapper function that creates a new instance of the resource class &
calls the correct view method for it.
:param init_args: (Optional) Positional params to be persisted along
for instantiating the class itself.
:param init_kwargs: (Optional) Keyword params to be persisted along
for instantiating the class itself.
:returns: View function
"""
return cls.as_view('detail', *init_args, **init_kwargs)
@classmethod
def as_view(cls, view_type, *init_args, **init_kwargs):
"""
Used for hooking up the all endpoints (including custom ones), this
returns a wrapper function that creates a new instance of the resource
class & calls the correct view method for it.
:param view_type: Should be one of ``list``, ``detail`` or ``custom``.
:type view_type: string
:param init_args: (Optional) Positional params to be persisted along
for instantiating the class itself.
:param init_kwargs: (Optional) Keyword params to be persisted along
for instantiating the class itself.
:returns: View function
"""
def _wrapper(request, *args, **kwargs):
# Make a new instance so that no state potentially leaks between
# instances.
inst = cls(*init_args, **init_kwargs)
inst.request = request
return inst.handle(view_type, *args, **kwargs)
return _wrapper
def request_method(self):
"""
Returns the HTTP method for the current request.
The default implementation is Django-specific, so if you're integrating
with a new web framework, you'll need to override this method within
your subclass.
:returns: The HTTP method in uppercase
:rtype: string
"""
# By default, Django-esque.
return self.request.method.upper()
def request_body(self):
"""
Returns the body of the current request.
Useful for deserializing the content the user sent (typically JSON).
The default implementation is Django-specific, so if you're integrating
with a new web framework, you'll need to override this method within
your subclass.
:returns: The body of the request
:rtype: string
"""
# By default, Django-esque.
return self.request.body
def build_response(self, data, status=200):
"""
Given some data, generates an HTTP response.
The default implementation is Django-specific, so if you're integrating
with a new web framework, you'll need to override this method within
your subclass.
:param data: The body of the response to send
:type data: string
:param status: (Optional) The status code to respond with. Default is
``200``
:type status: integer
:returns: A response object
"""
# TODO: Remove the Django.
# This should be plain old WSGI by default, if possible.
# By default, Django-esque.
from django.http import HttpResponse
resp = HttpResponse(data, content_type='application/json')
resp.status_code = status
return resp
def build_error(self, err):
"""
When an exception is encountered, this generates a JSON error message
for display to the user.
:param err: The exception seen. The message is exposed to the user, so
beware of sensitive data leaking.
:type err: Exception
:returns: A response object
"""
data = {
'error': six.text_type(err),
}
if self.is_debug():
# Add the traceback.
data['traceback'] = format_traceback(sys.exc_info())
body = self.serializer.serialize(data)
status = getattr(err, 'status', 500)
return self.build_response(body, status=status)
def is_debug(self):
"""
Controls whether or not the resource is in a debug environment.
If so, tracebacks will be added to the serialized response.
The default implementation simply returns ``False``, so if you're
integrating with a new web framework, you'll need to override this
method within your subclass.
:returns: If the resource is in a debug environment
:rtype: boolean
"""
return False
def bubble_exceptions(self):
"""
Controls whether or not exceptions will be re-raised when encountered.
The default implementation returns ``False``, which means errors should
return a serialized response.
If you'd like exceptions to be re-raised, override this method & return
``True``.
:returns: Whether exceptions should be re-raised or not
:rtype: boolean
"""
return False
def handle(self, endpoint, *args, **kwargs):
"""
A convenient dispatching method, this centralized some of the common
flow of the views.
This wraps/calls the methods the user defines (``list/detail/create``
etc.), allowing the user to ignore the
authentication/deserialization/serialization/response & just focus on
their data/interactions.
:param endpoint: The style of URI call (typically either ``list`` or
``detail``).
:type endpoint: string
:param args: (Optional) Any positional URI parameter data is passed
along here. Somewhat framework/URL-specific.
:param kwargs: (Optional) Any keyword/named URI parameter data is
passed along here. Somewhat framework/URL-specific.
:returns: A response object
"""
method = self.request_method()
try:
# Use ``.get()`` so we can also dodge potentially incorrect
# ``endpoint`` errors as well.
if not method in self.http_methods.get(endpoint, {}):
raise MethodNotImplemented(
"Unsupported method '{0}' for {1} endpoint.".format(
method,
endpoint
)
)
if not self.is_authenticated():
raise Unauthorized()
self.data = self.deserialize(method, endpoint, self.request_body())
view_method = getattr(self, self.http_methods[endpoint][method])
data = view_method(*args, **kwargs)
serialized = self.serialize(method, endpoint, data)
except Exception as err:
return self.handle_error(err)
status = self.status_map.get(self.http_methods[endpoint][method], OK)
return self.build_response(serialized, status=status)
def handle_error(self, err):
"""
When an exception is encountered, this generates a serialized error
message to return the user.
:param err: The exception seen. The message is exposed to the user, so
beware of sensitive data leaking.
:type err: Exception
:returns: A response object
"""
if self.bubble_exceptions():
raise err
return self.build_error(err)
def deserialize(self, method, endpoint, body):
"""
A convenience method for deserializing the body of a request.
If called on a list-style endpoint, this calls ``deserialize_list``.
Otherwise, it will call ``deserialize_detail``.
:param method: The HTTP method of the current request
:type method: string
:param endpoint: The endpoint style (``list`` or ``detail``)
:type endpoint: string
:param body: The body of the current request
:type body: string
:returns: The deserialized data
:rtype: ``list`` or ``dict``
"""
if endpoint == 'list':
return self.deserialize_list(body)
return self.deserialize_detail(body)
def deserialize_list(self, body):
"""
Given a string of text, deserializes a (presumed) list out of the body.
:param body: The body of the current request
:type body: string
:returns: The deserialized body or an empty ``list``
"""
if body:
return self.serializer.deserialize(body)
return []
def deserialize_detail(self, body):
"""
Given a string of text, deserializes a (presumed) object out of the body.
:param body: The body of the current request
:type body: string
:returns: The deserialized body or an empty ``dict``
"""
if body:
return self.serializer.deserialize(body)
return {}
def serialize(self, method, endpoint, data):
"""
A convenience method for serializing data for a response.
If called on a list-style endpoint, this calls ``serialize_list``.
Otherwise, it will call ``serialize_detail``.
:param method: The HTTP method of the current request
:type method: string
:param endpoint: The endpoint style (``list`` or ``detail``)
:type endpoint: string
:param data: The body for the response
:type data: string
:returns: A serialized version of the data
:rtype: string
"""
if endpoint == 'list':
# Create is a special-case, because you POST it to the collection,
# not to a detail.
if method == 'POST':
return self.serialize_detail(data)
return self.serialize_list(data)
return self.serialize_detail(data)
def serialize_list(self, data):
"""
Given a collection of data (``objects`` or ``dicts``), serializes them.
:param data: The collection of items to serialize
:type data: list or iterable
:returns: The serialized body
:rtype: string
"""
if data is None:
return ''
# Check for a ``Data``-like object. We should assume ``True`` (all
# data gets prepared) unless it's explicitly marked as not.
if not getattr(data, 'should_prepare', True):
prepped_data = data.value
else:
prepped_data = [self.prepare(item) for item in data]
final_data = self.wrap_list_response(prepped_data)
return self.serializer.serialize(final_data)
def serialize_detail(self, data):
"""
Given a single item (``object`` or ``dict``), serializes it.
:param data: The item to serialize
:type data: object or dict
:returns: The serialized body
:rtype: string
"""
if data is None:
return ''
# Check for a ``Data``-like object. We should assume ``True`` (all
# data gets prepared) unless it's explicitly marked as not.
if not getattr(data, 'should_prepare', True):
prepped_data = data.value
else:
prepped_data = self.prepare(data)
return self.serializer.serialize(prepped_data)
def prepare(self, data):
"""
Given an item (``object`` or ``dict``), this will potentially go through
& reshape the output based on ``self.prepare_with`` object.
:param data: An item to prepare for serialization
:type data: object or dict
:returns: A potentially reshaped dict
:rtype: dict
"""
return self.preparer.prepare(data)
def wrap_list_response(self, data):
"""
Takes a list of data & wraps it in a dictionary (within the ``objects``
key).
For security in JSON responses, it's better to wrap the list results in
an ``object`` (due to the way the ``Array`` constructor can be attacked
in Javascript). See http://haacked.com/archive/2009/06/25/json-hijacking.aspx/
& similar for details.
Overridable to allow for modifying the key names, adding data (or just
insecurely return a plain old list if that's your thing).
:param data: A list of data about to be serialized
:type data: list
:returns: A wrapping dict
:rtype: dict
"""
return {
"objects": data
}
def is_authenticated(self):
"""
A simple hook method for controlling whether a request is authenticated
to continue.
By default, we only allow the safe ``GET`` methods. All others are
denied.
:returns: Whether the request is authenticated or not.
:rtype: boolean
"""
if self.request_method() == 'GET':
return True
return False
# Common methods the user should implement.
    def list(self, *args, **kwargs):
        """
        Returns the data for a GET on a list-style endpoint.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: A collection of data
        :rtype: list or iterable
        """
        raise MethodNotImplemented()
    def detail(self, *args, **kwargs):
        """
        Returns the data for a GET on a detail-style endpoint.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: An item
        :rtype: object or dict
        """
        raise MethodNotImplemented()
    def create(self, *args, **kwargs):
        """
        Allows for creating data via a POST on a list-style endpoint.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: May return the created item or ``None``
        """
        raise MethodNotImplemented()
    def update(self, *args, **kwargs):
        """
        Updates existing data for a PUT on a detail-style endpoint.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: May return the updated item or ``None``
        """
        raise MethodNotImplemented()
    def delete(self, *args, **kwargs):
        """
        Deletes data for a DELETE on a detail-style endpoint.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: ``None``
        """
        raise MethodNotImplemented()
# Uncommon methods the user should implement.
# These have intentionally uglier method names, which reflects just how
# much harder they are to get right.
    def update_list(self, *args, **kwargs):
        """
        Updates the entire collection for a PUT on a list-style endpoint.
        Uncommonly implemented due to the complexity & (varying) business logic
        involved.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: A collection of data
        :rtype: list or iterable
        """
        raise MethodNotImplemented()
    def create_detail(self, *args, **kwargs):
        """
        Creates a subcollection of data for a POST on a detail-style endpoint.
        Uncommonly implemented due to the rarity of having nested collections.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: A collection of data
        :rtype: list or iterable
        """
        raise MethodNotImplemented()
    def delete_list(self, *args, **kwargs):
        """
        Deletes *ALL* data in the collection for a DELETE on a list-style
        endpoint.
        Uncommonly implemented due to potential of trashing large datasets.
        Implement with care.
        **MUST BE OVERRIDDEN BY THE USER** - By default, this raises
        ``MethodNotImplemented``.
        :returns: ``None``
        """
        raise MethodNotImplemented()
########NEW FILE########
__FILENAME__ = serializers
from .utils import json, MoreTypesJSONEncoder
class Serializer(object):
    """
    A base serialization class.

    Spells out the protocol a serializer must follow; every method here
    simply raises ``NotImplementedError``.
    Either subclass this or provide an object with the same
    ``deserialize/serialize`` methods on it.
    """
    def deserialize(self, body):
        """
        Handles deserializing data coming from the user.

        Implementations should return a plain Python data type (such as a
        dict or list) containing the data.
        :param body: The body of the current request
        :type body: string
        :returns: The deserialized data
        :rtype: ``list`` or ``dict``
        """
        raise NotImplementedError("Subclasses must implement this method.")

    def serialize(self, data):
        """
        Handles serializing data being sent to the user.

        Implementations should return a plain Python string containing the
        serialized data in the appropriate format.
        :param data: The body for the response
        :type data: string
        :returns: A serialized version of the data
        :rtype: string
        """
        raise NotImplementedError("Subclasses must implement this method.")
class JSONSerializer(Serializer):
    """A ``Serializer`` backed by the ``json`` module."""
    def deserialize(self, body):
        """
        The low-level deserialization.

        Underpins ``deserialize``, ``deserialize_list`` &
        ``deserialize_detail``.
        No built-in smarts — just loads the JSON.
        :param body: The body of the current request
        :type body: string
        :returns: The deserialized data
        :rtype: ``list`` or ``dict``
        """
        return json.loads(body)

    def serialize(self, data):
        """
        The low-level serialization.

        Underpins ``serialize``, ``serialize_list`` &
        ``serialize_detail``.
        No built-in smarts — just dumps the JSON, with the extended encoder
        handling dates/times/decimals.
        :param data: The body for the response
        :type data: string
        :returns: A serialized version of the data
        :rtype: string
        """
        return json.dumps(data, cls=MoreTypesJSONEncoder)
########NEW FILE########
__FILENAME__ = utils
import datetime
import decimal
import traceback
try:
import json
except ImportError:
import simplejson as json
class MoreTypesJSONEncoder(json.JSONEncoder):
    """
    A JSON encoder that allows for more common Python data types.

    In addition to the defaults handled by ``json``, this also supports:

        * ``datetime.datetime``
        * ``datetime.date``
        * ``datetime.time``
        * ``decimal.Decimal``

    """
    def default(self, data):
        # Dates & times become ISO-8601 strings; decimals become plain
        # strings. Everything else defers to the base encoder (which raises
        # ``TypeError`` for unsupported types).
        if isinstance(data, (datetime.datetime, datetime.date, datetime.time)):
            return data.isoformat()

        if isinstance(data, decimal.Decimal):
            return str(data)

        return super(MoreTypesJSONEncoder, self).default(data)
def format_traceback(exc_info):
    """
    Renders an ``exc_info`` triple as a traceback string, including the
    current call stack above the exception.

    :param exc_info: A ``(type, value, traceback)`` triple, as returned by
        ``sys.exc_info()``
    :returns: The formatted traceback, without a trailing newline
    :rtype: string
    """
    # Drop the last two frames (this function & its caller's formatting
    # machinery), then append the exception's own frames & summary line.
    frames = traceback.format_stack()[:-2]
    frames += traceback.format_tb(exc_info[2])
    frames += traceback.format_exception_only(exc_info[0], exc_info[1])
    combined = "Traceback (most recent call last):\n" + "".join(frames)
    # Strip the trailing newline.
    return combined[:-1]
########NEW FILE########
__FILENAME__ = fakes
class FakeHttpRequest(object):
    """A minimal stand-in for a framework request object."""
    def __init__(self, method='GET', body=''):
        # Normalize the method to uppercase, like real frameworks do.
        self.method = method.upper()
        self.body = body
class FakeHttpResponse(object):
    """A minimal stand-in for a framework response object."""
    def __init__(self, body, content_type='text/html'):
        self.body = body
        self.content_type = content_type
        # Assume success; tests mutate this as needed.
        self.status_code = 200
class FakeModel(object):
    """A minimal ORM-model stand-in; every kwarg becomes an attribute."""
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
########NEW FILE########
__FILENAME__ = test_dj
import unittest
from django.http import Http404
from django.core.exceptions import ObjectDoesNotExist
# Ugh. Settings for Django.
from django.conf import settings
settings.configure(DEBUG=True)
from restless.dj import DjangoResource
from restless.exceptions import Unauthorized
from restless.preparers import FieldsPreparer
from restless.resources import skip_prepare
from restless.utils import json
from .fakes import FakeHttpRequest, FakeModel
class DjTestResource(DjangoResource):
    """Test resource backed by an in-memory ``fake_db`` list of models."""
    preparer = FieldsPreparer(fields={
        'id': 'id',
        'title': 'title',
        'author': 'username',
        'body': 'content'
    })
    fake_db = []

    def __init__(self, *args, **kwargs):
        super(DjTestResource, self).__init__(*args, **kwargs)

        self.http_methods.update({
            'schema': {
                'GET': 'schema',
            }
        })

    def fake_init(self):
        # Just for testing.
        self.__class__.fake_db = [
            FakeModel(id=2, title='First post', username='daniel', content='Hello world!'),
            FakeModel(id=4, title='Another', username='daniel', content='Stuff here.'),
            FakeModel(id=5, title='Last', username='daniel', content="G'bye!"),
        ]

    def is_authenticated(self):
        if self.request_method() == 'DELETE':
            return False

        return True

    def list(self):
        return self.fake_db

    def detail(self, pk):
        for item in self.fake_db:
            if item.id == pk:
                return item

        # If it wasn't found in our fake DB, raise a Django-esque exception.
        raise ObjectDoesNotExist("Model with pk {0} not found.".format(pk))

    def create(self):
        self.fake_db.append(FakeModel(
            **self.data
        ))

    def update(self, pk):
        for item in self.fake_db:
            if item.id == pk:
                # BUGFIX: iterate ``.items()`` — iterating the dict directly
                # yields only keys, so ``for k, v in self.data`` would raise
                # ``ValueError`` on unpacking.
                for k, v in self.data.items():
                    setattr(item, k, v)

                return

    def create_detail(self):
        raise ValueError("This is a random & crazy exception.")

    @skip_prepare
    def schema(self):
        # A WILD SCHEMA VIEW APPEARS!
        return {
            'fields': {
                'id': {
                    'type': 'integer',
                    'required': True,
                    'help_text': 'The unique id for the post',
                },
                'title': {
                    'type': 'string',
                    'required': True,
                    'help_text': "The post's title",
                },
                'author': {
                    'type': 'string',
                    'required': True,
                    'help_text': 'The username of the author of the post',
                },
                'body': {
                    'type': 'string',
                    'required': False,
                    'default': '',
                    'help_text': 'The content of the post',
                }
            },
            'format': 'application/json',
            'allowed_list_http_methods': ['GET', 'POST'],
            'allowed_detail_http_methods': ['GET', 'PUT', 'DELETE'],
        }
class DjTestResourceHttp404Handling(DjTestResource):
    """Variant that signals a missing object with ``Http404`` instead."""
    def detail(self, pk):
        for record in self.fake_db:
            if record.id == pk:
                return record

        # Missing records surface as Django's own ``Http404`` exception.
        raise Http404("Model with pk {0} not found.".format(pk))
class DjangoResourceTestCase(unittest.TestCase):
    """Exercises ``DjangoResource`` end-to-end via ``DjTestResource``."""
    def setUp(self):
        super(DjangoResourceTestCase, self).setUp()
        self.res = DjTestResource()
        # Just for the fake data.
        self.res.fake_init()
    def test_as_list(self):
        list_endpoint = DjTestResource.as_list()
        req = FakeHttpRequest('GET')
        resp = list_endpoint(req)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'objects': [
                {
                    'author': 'daniel',
                    'body': 'Hello world!',
                    'id': 2,
                    'title': 'First post'
                },
                {
                    'author': 'daniel',
                    'body': 'Stuff here.',
                    'id': 4,
                    'title': 'Another'
                },
                {
                    'author': 'daniel',
                    'body': "G'bye!",
                    'id': 5,
                    'title': 'Last'
                }
            ]
        })
    def test_as_detail(self):
        detail_endpoint = DjTestResource.as_detail()
        req = FakeHttpRequest('GET')
        resp = detail_endpoint(req, 4)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'author': 'daniel',
            'body': 'Stuff here.',
            'id': 4,
            'title': 'Another'
        })
    def test_as_view(self):
        # This would be hooked up via the URLconf...
        schema_endpoint = DjTestResource.as_view('schema')
        req = FakeHttpRequest('GET')
        resp = schema_endpoint(req)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 200)
        schema = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(
            sorted(list(schema['fields'].keys())),
            [
                'author',
                'body',
                'id',
                'title',
            ]
        )
        self.assertEqual(schema['fields']['id']['type'], 'integer')
        self.assertEqual(schema['format'], 'application/json')
    def test_handle_not_implemented(self):
        self.res.request = FakeHttpRequest('TRACE')
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 501)
        resp_json = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(resp_json['error'], "Unsupported method 'TRACE' for list endpoint.")
        self.assertTrue('traceback' in resp_json)
    def test_handle_not_authenticated(self):
        # Special-cased above for testing.
        self.res.request = FakeHttpRequest('DELETE')
        # First with DEBUG on
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 401)
        resp_json = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(resp_json['error'], 'Unauthorized.')
        self.assertTrue('traceback' in resp_json)
        # Now with DEBUG off.
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 401)
        resp_json = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(resp_json, {
            'error': 'Unauthorized.',
        })
        self.assertFalse('traceback' in resp_json)
        # Last, with bubble_exceptions.
        class Bubbly(DjTestResource):
            def bubble_exceptions(self):
                return True
        with self.assertRaises(Unauthorized):
            bubb = Bubbly()
            bubb.request = FakeHttpRequest('DELETE')
            bubb.handle('list')
    def test_handle_build_err(self):
        # Special-cased above for testing.
        self.res.request = FakeHttpRequest('POST')
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = self.res.handle('detail')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 500)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'error': 'This is a random & crazy exception.'
        })
    def test_object_does_not_exist(self):
        # Make sure we get a proper Not Found exception rather than a
        # generic 500, when code raises a ObjectDoesNotExist exception.
        self.res.request = FakeHttpRequest('GET')
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = self.res.handle('detail', 1001)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 404)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'error': 'Model with pk 1001 not found.'
        })
    def test_http404_exception_handling(self):
        # Make sure we get a proper Not Found exception rather than a
        # generic 500, when code raises a Http404 exception.
        res = DjTestResourceHttp404Handling()
        res.request = FakeHttpRequest('GET')
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = res.handle('detail', 1001)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 404)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'error': 'Model with pk 1001 not found.'
        })
    def test_build_url_name(self):
        self.assertEqual(
            DjTestResource.build_url_name('list'),
            'api_djtest_list'
        )
        self.assertEqual(
            DjTestResource.build_url_name('detail'),
            'api_djtest_detail'
        )
        self.assertEqual(
            DjTestResource.build_url_name('schema'),
            'api_djtest_schema'
        )
        self.assertEqual(
            DjTestResource.build_url_name('list', name_prefix='v2_'),
            'v2_list'
        )
        self.assertEqual(
            DjTestResource.build_url_name('detail', name_prefix='v2_'),
            'v2_detail'
        )
        self.assertEqual(
            DjTestResource.build_url_name('schema', name_prefix='v2_'),
            'v2_schema'
        )
    def test_urls(self):
        patterns = DjTestResource.urls()
        self.assertEqual(len(patterns), 2)
        self.assertEqual(patterns[0].name, 'api_djtest_list')
        self.assertEqual(patterns[1].name, 'api_djtest_detail')
        patterns = DjTestResource.urls(name_prefix='v2_tests')
        self.assertEqual(len(patterns), 2)
        self.assertEqual(patterns[0].name, 'v2_tests_list')
        self.assertEqual(patterns[1].name, 'v2_tests_detail')
    def test_create(self):
        self.res.request = FakeHttpRequest('POST', body='{"id": 6, "title": "Moved hosts", "author": "daniel"}')
        self.assertEqual(len(self.res.fake_db), 3)
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.content.decode('utf-8'), '')
        # Check the internal state.
        self.assertEqual(len(self.res.fake_db), 4)
        self.assertEqual(self.res.data, {
            'author': 'daniel',
            'id': 6,
            'title': 'Moved hosts'
        })
########NEW FILE########
__FILENAME__ = test_fl
import unittest
# Ugh. Globals for Flask.
import flask
from restless.fl import FlaskResource
from restless.utils import json
from .fakes import FakeHttpRequest
class FlTestResource(FlaskResource):
    """Flask-flavored test resource backed by an in-memory ``fake_db``."""
    fake_db = []
    def fake_init(self):
        # Just for testing.
        self.__class__.fake_db = [
            {"id": 2, "title": 'First post'},
            {"id": 4, "title": 'Another'},
            {"id": 5, "title": 'Last'},
        ]
    def list(self):
        return self.fake_db
    def detail(self, pk):
        # Linear scan is fine for a three-item fake DB.
        for item in self.fake_db:
            if item['id'] == pk:
                return item
    def create(self):
        self.fake_db.append(self.data)
class FlaskResourceTestCase(unittest.TestCase):
    """Exercises ``FlaskResource`` via ``FlTestResource`` & a test app."""
    def setUp(self):
        super(FlaskResourceTestCase, self).setUp()
        self.res = FlTestResource()
        self.app = flask.Flask('test_restless')
        self.app.config['DEBUG'] = True
        # Just for the fake data.
        self.res.fake_init()
    def test_as_list(self):
        list_endpoint = FlTestResource.as_list()
        flask.request = FakeHttpRequest('GET')
        with self.app.test_request_context('/whatever/', method='GET'):
            resp = list_endpoint()
            self.assertEqual(resp.headers['Content-Type'], 'application/json')
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(json.loads(resp.data.decode('utf-8')), {
                'objects': [
                    {
                        'id': 2,
                        'title': 'First post'
                    },
                    {
                        'id': 4,
                        'title': 'Another'
                    },
                    {
                        'id': 5,
                        'title': 'Last'
                    }
                ]
            })
    def test_as_detail(self):
        detail_endpoint = FlTestResource.as_detail()
        flask.request = FakeHttpRequest('GET')
        with self.app.test_request_context('/whatever/', method='GET'):
            resp = detail_endpoint(4)
            self.assertEqual(resp.headers['Content-Type'], 'application/json')
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(json.loads(resp.data.decode('utf-8')), {
                'id': 4,
                'title': 'Another'
            })
    def test_is_debug(self):
        with self.app.test_request_context('/whatever/', method='GET'):
            self.assertTrue(self.res.is_debug())
        with self.app.test_request_context('/whatever/', method='GET'):
            self.app.debug = False
            # This should do the correct lookup.
            self.assertFalse(self.res.is_debug())
    def test_build_response(self):
        with self.app.test_request_context('/whatever/', method='GET'):
            resp = self.res.build_response('Hello, world!', status=302)
            self.assertEqual(resp.status_code, 302)
            self.assertEqual(resp.headers['Content-Type'], 'application/json')
            self.assertEqual(resp.data.decode('utf-8'), 'Hello, world!')
    def test_add_url_rules(self):
        with self.app.test_request_context('/whatever/', method='GET'):
            FlTestResource.add_url_rules(self.app, '/api/')
            rules = sorted([rule.endpoint for rule in self.app.url_map.iter_rules()])
            # Two restless rules plus Flask's built-in ``static`` rule.
            self.assertEqual(len(rules), 3)
            self.assertEqual(rules[0], 'api_fltest_detail')
            self.assertEqual(rules[1], 'api_fltest_list')
            FlTestResource.add_url_rules(self.app, '/api/', endpoint_prefix='v2_tests')
            rules = sorted([rule.endpoint for rule in self.app.url_map.iter_rules()])
            self.assertEqual(len(rules), 5)
            self.assertEqual(rules[3], 'v2_tests_detail')
            self.assertEqual(rules[4], 'v2_tests_list')
########NEW FILE########
__FILENAME__ = test_it
import unittest
try:
import itty
from restless.it import IttyResource
except ImportError:
itty = None
IttyResource = object
from restless.utils import json
from .fakes import FakeHttpRequest
class ItTestResource(IttyResource):
    """itty-flavored test resource backed by an in-memory ``fake_db``."""
    fake_db = []
    def fake_init(self):
        # Just for testing.
        self.__class__.fake_db = [
            {"id": 2, "title": 'First post'},
            {"id": 4, "title": 'Another'},
            {"id": 5, "title": 'Last'},
        ]
    def list(self):
        return self.fake_db
    def detail(self, pk):
        # Linear scan is fine for a three-item fake DB.
        for item in self.fake_db:
            if item['id'] == pk:
                return item
    def create(self):
        self.fake_db.append(self.data)
@unittest.skipIf(not itty, "itty is not available")
class IttyResourceTestCase(unittest.TestCase):
    """Exercises ``IttyResource`` via ``ItTestResource`` (skipped sans itty)."""
    def setUp(self):
        super(IttyResourceTestCase, self).setUp()
        self.res = ItTestResource()
        # Just for the fake data.
        self.res.fake_init()
    def test_as_list(self):
        list_endpoint = ItTestResource.as_list()
        request = FakeHttpRequest('GET')
        resp = list_endpoint(request)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.status, 200)
        self.assertEqual(json.loads(resp.output), {
            'objects': [
                {
                    'id': 2,
                    'title': 'First post'
                },
                {
                    'id': 4,
                    'title': 'Another'
                },
                {
                    'id': 5,
                    'title': 'Last'
                }
            ]
        })
    def test_as_detail(self):
        detail_endpoint = ItTestResource.as_detail()
        request = FakeHttpRequest('GET')
        resp = detail_endpoint(request, 4)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.status, 200)
        self.assertEqual(json.loads(resp.output), {
            'id': 4,
            'title': 'Another'
        })
    def test_is_debug(self):
        self.assertFalse(self.res.is_debug())
        self.res.debug = True
        self.addCleanup(setattr, self.res, 'debug', False)
        self.assertTrue(self.res.is_debug())
    def test_build_response(self):
        resp = self.res.build_response('Hello, world!', status=302)
        self.assertEqual(resp.status, 302)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.output, 'Hello, world!')
    def test_setup_urls(self):
        # NOTE(review): mutates itty's global REQUEST_MAPPINGS — assumes no
        # other test registered routes first.
        self.assertEqual(len(itty.REQUEST_MAPPINGS['GET']), 0)
        self.assertEqual(len(itty.REQUEST_MAPPINGS['POST']), 0)
        self.assertEqual(len(itty.REQUEST_MAPPINGS['PUT']), 0)
        self.assertEqual(len(itty.REQUEST_MAPPINGS['DELETE']), 0)
        ItTestResource.setup_urls('/test')
        self.assertEqual(len(itty.REQUEST_MAPPINGS['GET']), 2)
        self.assertEqual(len(itty.REQUEST_MAPPINGS['POST']), 2)
        self.assertEqual(len(itty.REQUEST_MAPPINGS['PUT']), 2)
        self.assertEqual(len(itty.REQUEST_MAPPINGS['DELETE']), 2)
        self.assertEqual(itty.REQUEST_MAPPINGS['GET'][0][1], '/test/')
        self.assertEqual(itty.REQUEST_MAPPINGS['GET'][1][1], '/test/(?P<pk>\\d+)/')
########NEW FILE########
__FILENAME__ = test_preparers
import unittest
from restless.preparers import Preparer, FieldsPreparer
class InstaObj(object):
    """Quick object stand-in; every kwarg becomes an attribute."""
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
class LookupDataTestCase(unittest.TestCase):
    """Exercises ``FieldsPreparer.lookup_data`` over dicts, objects & mixes."""
    def setUp(self):
        super(LookupDataTestCase, self).setUp()
        self.preparer = FieldsPreparer(fields=None)
        self.obj_data = InstaObj(
            say='what',
            count=453,
            moof={
                'buried': {
                    'id': 7,
                    'data': InstaObj(yes='no')
                }
            }
        )
        self.dict_data = {
            'hello': 'world',
            'abc': 123,
            'more': {
                'things': 'here',
                'nested': InstaObj(
                    awesome=True,
                    depth=3
                ),
            },
        }
    def test_dict_simple(self):
        self.assertEqual(self.preparer.lookup_data('hello', self.dict_data), 'world')
        self.assertEqual(self.preparer.lookup_data('abc', self.dict_data), 123)
    def test_obj_simple(self):
        self.assertEqual(self.preparer.lookup_data('say', self.obj_data), 'what')
        self.assertEqual(self.preparer.lookup_data('count', self.obj_data), 453)
    def test_dict_nested(self):
        self.assertEqual(self.preparer.lookup_data('more.things', self.dict_data), 'here')
        self.assertEqual(self.preparer.lookup_data('more.nested.depth', self.dict_data), 3)
    def test_obj_nested(self):
        self.assertEqual(self.preparer.lookup_data('moof.buried.id', self.obj_data), 7)
        self.assertEqual(self.preparer.lookup_data('moof.buried.data.yes', self.obj_data), 'no')
    def test_dict_miss(self):
        with self.assertRaises(KeyError):
            self.preparer.lookup_data('another', self.dict_data)
    def test_obj_miss(self):
        with self.assertRaises(AttributeError):
            self.preparer.lookup_data('whee', self.obj_data)
    def test_empty_lookup(self):
        # We could possibly get here in the recursion.
        self.assertEqual(self.preparer.lookup_data('', 'Last value'), 'Last value')
    def test_complex_miss(self):
        with self.assertRaises(AttributeError):
            self.preparer.lookup_data('more.nested.nope', self.dict_data)
########NEW FILE########
__FILENAME__ = test_pyr
import unittest
from pyramid import testing
from restless.pyr import PyramidResource
from restless.utils import json
from .fakes import FakeHttpRequest, FakeHttpResponse
class PyrTestResource(PyramidResource):
    """Pyramid-flavored test resource backed by an in-memory ``fake_db``."""
    fake_db = []
    def fake_init(self):
        # Just for testing.
        self.__class__.fake_db = [
            {"id": 2, "title": 'First post'},
            {"id": 4, "title": 'Another'},
            {"id": 5, "title": 'Last'},
        ]
    def list(self):
        return self.fake_db
    def detail(self, name):
        # ``name`` mirrors the ``{name}`` URL placeholder used in the route.
        for item in self.fake_db:
            if item['id'] == name:
                return item
    def create(self):
        self.fake_db.append(self.data)
    def is_authenticated(self):
        # DELETE is denied so the 401 path can be tested.
        if self.request_method() == 'DELETE':
            return False
        return True
class PyramidResourceTestCase(unittest.TestCase):
    """Exercises ``PyramidResource`` via ``PyrTestResource``."""
    def setUp(self):
        self.config = testing.setUp()
        self.res = PyrTestResource()
        self.res.fake_init()
    def test_as_list(self):
        list_endpoint = PyrTestResource.as_list()
        req = FakeHttpRequest('GET')
        resp = list_endpoint(req)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.body.decode('utf-8')), {
            'objects': [
                {
                    'id': 2,
                    'title': 'First post'
                },
                {
                    'id': 4,
                    'title': 'Another'
                },
                {
                    'id': 5,
                    'title': 'Last'
                }
            ]
        })
    def test_as_detail(self):
        detail_endpoint = PyrTestResource.as_detail()
        # NOTE(review): this DummyRequest is immediately overwritten by the
        # FakeHttpRequest below — looks like dead code; confirm & remove.
        req = testing.DummyRequest()
        req = FakeHttpRequest('GET')
        req.matchdict = {'name': 4}
        resp = detail_endpoint(req)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.body.decode('utf-8')), {
            'id': 4,
            'title': 'Another'
        })
    def test_handle_not_authenticated(self):
        # Special-cased above for testing.
        self.res.request = FakeHttpRequest('DELETE')
        resp = self.res.handle('list')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.status_code, 401)
        self.assertEqual(resp.body.decode('utf-8'), '{"error": "Unauthorized."}')
    def test_add_views(self):
        config = PyrTestResource.add_views(self.config, '/users/')
        routes = config.get_routes_mapper().get_routes()
        self.assertEqual(len(routes), 2)
        self.assertEqual([r.name for r in routes], ['api_pyrtest_list', 'api_pyrtest_detail'])
        self.assertEqual([r.path for r in routes], ['/users/', '/users/{name}/'])
    def test_create(self):
        self.res.request = FakeHttpRequest('POST', body='{"id": 6, "title": "Moved hosts"}')
        self.assertEqual(len(self.res.fake_db), 3)
        resp = self.res.handle('list')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.body.decode('utf-8'), '')
        # Check the internal state.
        self.assertEqual(len(self.res.fake_db), 4)
        self.assertEqual(self.res.data, {
            'id': 6,
            'title': 'Moved hosts'
        })
########NEW FILE########
__FILENAME__ = test_resources
import datetime
import decimal
import unittest
from restless.exceptions import HttpError, NotFound, MethodNotImplemented
from restless.preparers import Preparer, FieldsPreparer
from restless.resources import Resource
from restless.utils import json
from .fakes import FakeHttpRequest, FakeHttpResponse
class NonDjangoResource(Resource):
    # Because the default implementation is a tiny-bit Django-specific,
    # we're faking some things here.
    def build_response(self, data, status=200):
        """Wrap *data* in a fake HTTP response with a JSON content type."""
        resp = FakeHttpResponse(data, content_type='application/json')
        resp.status_code = status
        return resp
class ResourceTestCase(unittest.TestCase):
resource_class = NonDjangoResource
def setUp(self):
super(ResourceTestCase, self).setUp()
self.res = self.resource_class()
# Assign here, since we typically won't be entering through
# ``as_list/as_detail`` methods like normal flow.
self.res.request = FakeHttpRequest()
def test_init(self):
res = self.resource_class('abc', test=True)
self.assertEqual(res.init_args, ('abc',))
self.assertEqual(res.init_kwargs, {'test': True})
self.assertEqual(res.request, None)
self.assertEqual(res.data, None)
self.assertEqual(res.status, 200)
def test_request_method(self):
self.assertEqual(self.res.request_method(), 'GET')
self.res.request = FakeHttpRequest('POST', '{"hello": "world"}')
self.assertEqual(self.res.request_method(), 'POST')
self.res.request = FakeHttpRequest('PUT', '{"hello": "world"}')
self.assertEqual(self.res.request_method(), 'PUT')
self.res.request = FakeHttpRequest('DELETE', '')
self.assertEqual(self.res.request_method(), 'DELETE')
def test_request_body(self):
self.assertEqual(self.res.request_body(), '')
self.res.request = FakeHttpRequest('POST', '{"hello": "world"}')
self.assertEqual(self.res.request_body(), '{"hello": "world"}')
self.res.request = FakeHttpRequest('PUT', '{"hello": "world"}')
self.assertEqual(self.res.request_body(), '{"hello": "world"}')
self.res.request = FakeHttpRequest('DELETE', '{}')
self.assertEqual(self.res.request_body(), '{}')
def test_build_response(self):
resp = self.res.build_response('Hello, world!')
self.assertEqual(resp.body, 'Hello, world!')
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.status_code, 200)
resp = self.res.build_response('{"hello": "world"}', status=302)
self.assertEqual(resp.body, '{"hello": "world"}')
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.status_code, 302)
def test_build_error(self):
err = HttpError("Whoopsie")
resp = self.res.build_error(err)
resp_body = json.loads(resp.body)
self.assertEqual(resp_body, {'error': 'Whoopsie'})
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.status_code, 500)
nf_err = NotFound()
resp = self.res.build_error(nf_err)
resp_body = json.loads(resp.body)
# Default error message.
self.assertEqual(resp_body, {'error': 'Resource not found.'})
self.assertEqual(resp.content_type, 'application/json')
# Custom status code.
self.assertEqual(resp.status_code, 404)
# Non-restless exception.
unknown_err = AttributeError("'something' not found on the object.")
resp = self.res.build_error(unknown_err)
resp_body = json.loads(resp.body)
# Still gets the JSON treatment & an appropriate status code.
self.assertEqual(resp_body, {'error': "'something' not found on the object."})
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.status_code, 500)
def test_is_debug(self):
self.assertFalse(self.res.is_debug())
def test_bubble_exceptions(self):
self.assertFalse(self.res.bubble_exceptions())
def test_deserialize(self):
list_body = '["one", "three", "two"]'
self.assertEqual(self.res.deserialize('POST', 'list', list_body), [
"one",
"three",
"two",
])
# Should select list.
self.assertEqual(self.res.deserialize('POST', 'list', ''), [])
# Should select detail.
self.assertEqual(self.res.deserialize('PUT', 'detail', ''), {})
def test_deserialize_list(self):
body = '["one", "three", "two"]'
self.assertEqual(self.res.deserialize_list(body), [
"one",
"three",
"two",
])
self.assertEqual(self.res.deserialize_list(''), [])
def test_deserialize_detail(self):
body = '{"title": "Hitchhiker\'s Guide To The Galaxy", "author": "Douglas Adams"}'
self.assertEqual(self.res.deserialize_detail(body), {
'author': 'Douglas Adams',
'title': "Hitchhiker's Guide To The Galaxy",
})
self.assertEqual(self.res.deserialize_detail(''), {})
def test_serialize(self):
list_data = ['a', 'c', 'b']
detail_data = {'hello': 'world'}
# Normal calls.
self.assertEqual(self.res.serialize('GET', 'list', list_data), '{"objects": ["a", "c", "b"]}')
self.assertEqual(self.res.serialize('GET', 'detail', detail_data), '{"hello": "world"}')
# The create special-case.
self.assertEqual(self.res.serialize('POST', 'list', detail_data), '{"hello": "world"}')
# Make sure other methods aren't special-cased.
self.assertEqual(self.res.serialize('PUT', 'list', list_data), '{"objects": ["a", "c", "b"]}')
def test_serialize_list(self):
data = [
{
'title': 'Cosmos',
'author': 'Carl Sagan',
'short_desc': 'A journey through the stars by an emminent astrophysist.',
'pub_date': '1980',
},
{
'title': "Hitchhiker's Guide To The Galaxy",
'author': 'Douglas Adams',
'short_desc': "Don't forget your towel.",
'pub_date': '1979',
}
]
self.res.preparer = FieldsPreparer(fields={
'title': 'title',
'author': 'author',
'synopsis': 'short_desc',
})
res = self.res.serialize_list(data)
self.assertEqual(json.loads(res), {
'objects': [
{
'author': 'Carl Sagan',
'synopsis': 'A journey through the stars by an emminent astrophysist.',
'title': 'Cosmos'
},
{
'title': "Hitchhiker's Guide To The Galaxy",
'author': 'Douglas Adams',
'synopsis': "Don't forget your towel.",
},
],
})
# Make sure we don't try to serialize a ``None``, which would fail.
self.assertEqual(self.res.serialize_list(None), '')
def test_serialize_detail(self):
# This isn't very unit-y, but we're also testing that we're using the
# right JSON encoder & that it can handle other data types.
data = {
'title': 'Cosmos',
'author': 'Carl Sagan',
'short_desc': 'A journey through the stars by an emminent astrophysist.',
}
self.res.preparer = FieldsPreparer(fields={
'title': 'title',
'author': 'author',
'synopsis': 'short_desc',
})
res = self.res.serialize_detail(data)
self.assertEqual(json.loads(res), {
'author': 'Carl Sagan',
'synopsis': 'A journey through the stars by an emminent astrophysist.',
'title': 'Cosmos'
})
# Make sure we don't try to serialize a ``None``, which would fail.
self.assertEqual(self.res.serialize_detail(None), '')
def test_prepare(self):
# Without fields.
data = {
'title': 'Cosmos',
'author': 'Carl Sagan',
'short_desc': 'A journey through the stars by an emminent astrophysist.',
'pub_date': '1980'
}
# Should be unmodified.
self.assertTrue(isinstance(self.res.preparer, Preparer))
self.assertEqual(self.res.prepare(data), data)
self.res.preparer = FieldsPreparer(fields={
'title': 'title',
'author': 'author',
'synopsis': 'short_desc',
})
self.assertEqual(self.res.prepare(data), {
'author': 'Carl Sagan',
'synopsis': 'A journey through the stars by an emminent astrophysist.',
'title': 'Cosmos'
})
def test_wrap_list_response(self):
data = ['one', 'three', 'two']
self.assertEqual(self.res.wrap_list_response(data), {
'objects': [
'one',
'three',
'two',
],
})
def test_is_authenticated(self):
# By default, only GETs are allowed.
self.assertTrue(self.res.is_authenticated())
self.res.request = FakeHttpRequest('POST')
self.assertFalse(self.res.is_authenticated())
self.res.request = FakeHttpRequest('PUT')
self.assertFalse(self.res.is_authenticated())
self.res.request = FakeHttpRequest('DELETE')
self.assertFalse(self.res.is_authenticated())
def test_list(self):
with self.assertRaises(MethodNotImplemented):
self.res.list()
def test_detail(self):
with self.assertRaises(MethodNotImplemented):
self.res.detail()
def test_create(self):
with self.assertRaises(MethodNotImplemented):
self.res.create()
def test_update(self):
with self.assertRaises(MethodNotImplemented):
self.res.update()
def test_delete(self):
with self.assertRaises(MethodNotImplemented):
self.res.delete()
def test_update_list(self):
with self.assertRaises(MethodNotImplemented):
self.res.update_list()
def test_create_detail(self):
with self.assertRaises(MethodNotImplemented):
self.res.create_detail()
def test_delete_list(self):
with self.assertRaises(MethodNotImplemented):
self.res.delete_list()
########NEW FILE########
__FILENAME__ = test_serializers
import datetime
from decimal import Decimal
import unittest
from restless.serializers import JSONSerializer
class JSONSerializerTestCase(unittest.TestCase):
    """Round-trip tests for JSONSerializer, including types the stock
    ``json`` encoder cannot handle (datetime, Decimal)."""
    def setUp(self):
        super(JSONSerializerTestCase, self).setUp()
        self.serializer = JSONSerializer()
        self.dict_data = {
            'hello': 'world',
            'abc': 123,
            'more': {
                'things': 'here',
                # Some data the usual JSON encoder can't handle...
                'nested': datetime.datetime(2014, 3, 30, 12, 55, 15),
                'again': Decimal('18.9'),
            },
        }
    def test_serialize(self):
        # Substring checks keep the test independent of dict key ordering.
        body = self.serializer.serialize(self.dict_data)
        self.assertTrue('"hello": "world"' in body)
        self.assertTrue('"abc": 123' in body)
        # datetime comes out as ISO-8601, Decimal as its string form.
        self.assertTrue('"nested": "2014-03-30T12:55:15"' in body)
        self.assertTrue('"again": "18.9"' in body)
    def test_deserialize(self):
        self.assertEqual(self.serializer.deserialize('{"more": "things"}'), {
            'more': 'things',
        })
########NEW FILE########
__FILENAME__ = test_utils
import sys
import unittest
from restless.utils import format_traceback
class FormatTracebackTestCase(unittest.TestCase):
    """Checks that ``format_traceback`` renders ``sys.exc_info()`` as the
    standard multi-line traceback string, without a trailing newline."""
    def test_format_traceback(self):
        try:
            raise ValueError("Because we need an exception.")
        except ValueError:
            # Narrowed from a bare ``except:`` — we only expect the
            # ValueError raised above, and a bare clause would also swallow
            # KeyboardInterrupt/SystemExit.
            exc_info = sys.exc_info()
        result = format_traceback(exc_info)
        # Standard header, and the trailing newline has been stripped.
        self.assertTrue(result.startswith('Traceback (most recent call last):\n'))
        self.assertFalse(result.endswith('\n'))
        lines = result.split('\n')
        # Header + at least one frame + the exception line itself.
        self.assertTrue(len(lines) > 3)
        self.assertEqual(lines[-1], 'ValueError: Because we need an exception.')
########NEW FILE########
| [
"dyangUCI@github.com"
] | dyangUCI@github.com |
335250ce21813f53a167005fc2ebda740610ebd1 | d1fb76c0fdb08dc998a01e8eeca8bd8806db82a0 | /onionstudio/manual.py | b0e6ba28292f0d1c24853ba9d0a8445831e24cc1 | [
"MIT"
] | permissive | jarret/onionstudio | 822613dec0f72ef5db717ffdc8761dccb912de43 | 5ebf0a75cf1e7960822c96a987668be5ed82aa41 | refs/heads/master | 2020-12-03T08:45:44.808961 | 2020-07-18T16:49:57 | 2020-07-18T16:49:57 | 231,258,649 | 11 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | # Copyright (c) 2020 Jarret Dyrbye
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php
import sys
from onionstudio.pixel import Pixel
class ManualToPixels:
    """Parses a manually-specified pixel string into ``Pixel`` objects.

    Expected format: underscore-separated (x, y, rgb) triples, e.g.
    ``"1_2_ff0000_3_4_00ff00"``.
    """
    def __init__(self, pixels_string):
        self.pixels_string = pixels_string

    def parse_pixels(self):
        """Return ``(pixels, None)`` on success, ``(None, error_message)`` on failure."""
        # str.split always returns at least one element, so the original
        # ``len(string_tokens) == 0`` guard was unreachable; test the raw
        # string instead so an empty input reports "no pixels given".
        if not self.pixels_string:
            return None, "no pixels given"
        string_tokens = self.pixels_string.split("_")
        if len(string_tokens) % 3 != 0:
            return None, 'could not parse "%s" as pixels' % self.pixels_string
        pixels = []
        for i in range(0, len(string_tokens), 3):
            pixel_tokens = string_tokens[i:i + 3]
            try:
                x = int(pixel_tokens[0])
                y = int(pixel_tokens[1])
                rgb = pixel_tokens[2]
                pixels.append(Pixel(x, y, rgb))
            except Exception:
                # Narrowed from a bare ``except`` so KeyboardInterrupt and
                # SystemExit are no longer swallowed; still catches whatever
                # int() or the Pixel constructor may raise.
                return None, "could not interpret %s as pixel" % pixel_tokens
        return pixels, None
| [
"jarret.dyrbye@gmail.com"
] | jarret.dyrbye@gmail.com |
4855f4a63ce71ad5e80edbc3a6a41419c24b5520 | 13ea58f72fa96e2455609fb452b5f3b98e94f846 | /examples/diffusion/poisson_field_dependent_material.py | 4f2225b3311fd8d5dd012e8c40b8f44a9b27ba83 | [
"BSD-3-Clause"
] | permissive | vondrejc/sfepy | 4284ee47979b89d9e504b72b91689a9ce0c3a5ec | 8e427af699c4b2858eb096510057abb3ae7e28e8 | refs/heads/master | 2021-01-24T00:09:18.722674 | 2014-08-20T12:37:03 | 2014-08-20T14:25:56 | 12,810,199 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,370 | py | r"""
Laplace equation with a field-dependent material parameter.
Find :math:`T(t)` for :math:`t \in [0, t_{\rm final}]` such that:
.. math::
\int_{\Omega} c(T) \nabla s \cdot \nabla T
= 0
\;, \quad \forall s \;.
where :math:`c(T)` is the :math:`T` dependent diffusion coefficient.
Each iteration calculates :math:`T` and adjusts :math:`c(T)`.
"""
from sfepy import data_dir
from sfepy.base.base import output
filename_mesh = data_dir + '/meshes/3d/cylinder.mesh'
t0 = 0.0
t1 = 0.1
n_step = 11
def get_conductivity(ts, coors, problem, equations=None, mode=None, **kwargs):
    """
    Calculates the conductivity as 2+10*T and returns it.
    This relation results in larger T gradients where T is small.
    """
    # Material functions are invoked in several modes; only quadrature-point
    # ('qp') evaluation produces data here (other modes return None).
    if mode == 'qp':
        # T-field values in quadrature points coordinates given by integral i
        # - they are the same as in `coors` argument.
        T_values = problem.evaluate('ev_volume_integrate.i.Omega(T)',
                                    mode='qp', verbose=False)
        val = 2 + 10 * (T_values + 2)
        output('conductivity: min:', val.min(), 'max:', val.max())
        # Flatten the first two axes into one so the result is (N, 1, 1),
        # the layout expected for material data in 'qp' mode.
        val.shape = (val.shape[0] * val.shape[1], 1, 1)
        return {'val' : val}
materials = {
'coef' : 'get_conductivity',
}
fields = {
'temperature' : ('real', 1, 'Omega', 1),
}
variables = {
'T' : ('unknown field', 'temperature', 0),
's' : ('test field', 'temperature', 'T'),
}
regions = {
'Omega' : 'all',
'Gamma_Left' : ('vertices in (x < 0.00001)', 'facet'),
'Gamma_Right' : ('vertices in (x > 0.099999)', 'facet'),
}
ebcs = {
'T1' : ('Gamma_Left', {'T.0' : 2.0}),
'T2' : ('Gamma_Right', {'T.0' : -2.0}),
}
functions = {
'get_conductivity' : (get_conductivity,),
}
ics = {
'ic' : ('Omega', {'T.0' : 0.0}),
}
integrals = {
'i' : 1,
}
equations = {
'Temperature' : """dw_laplace.i.Omega( coef.val, s, T ) = 0"""
}
solvers = {
'ls' : ('ls.scipy_direct', {}),
'newton' : ('nls.newton', {
'i_max' : 1,
'eps_a' : 1e-10,
'eps_r' : 1.0,
'problem' : 'nonlinear'
}),
'ts' : ('ts.simple', {
't0' : t0,
't1' : t1,
'dt' : None,
'n_step' : n_step, # has precedence over dt!
'quasistatic' : True,
}),
}
options = {
'nls' : 'newton',
'ls' : 'ls',
'ts' : 'ts',
'save_steps' : -1,
}
| [
"cimrman3@ntc.zcu.cz"
] | cimrman3@ntc.zcu.cz |
5f746eab98d95e889f73892fc9d177b490c27480 | 5a017fc861db92e3a2919f260d54f1301afbb3e5 | /MIDI Remote Scripts/Akai_Force_MPC/scene_list.py | e178c94e8d3079a95f83763cd4dc3dedfafe3dce | [] | no_license | kera67/livepy_diff_ten | 8d8d0f3b76048f1fe5d4c0fbc02549dc922c7d5b | 12a0af9e9c57d0721af5036ce23af549df2c95f0 | refs/heads/master | 2023-07-14T18:26:33.591915 | 2020-11-19T07:50:28 | 2020-11-19T07:50:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,597 | py | from __future__ import absolute_import, print_function, unicode_literals
from itertools import izip_longest
from ableton.v2.base import listens
from ableton.v2.control_surface import Component
from .scene import MPCSceneComponent
class SceneListComponent(Component):
    """Maps a window of the Live song's scene list onto per-scene components,
    tracking the session ring's scene offset."""
    def __init__(self, session_ring = None, num_scenes = 0, *a, **k):
        super(SceneListComponent, self).__init__(*a, **k)
        assert session_ring is not None
        self._session_ring = session_ring
        self.__on_offsets_changed.subject = session_ring
        # One scene component per visible row.
        self._scenes = [ MPCSceneComponent(parent=self, session_ring=session_ring) for _ in xrange(num_scenes) ]
        self.__on_scene_list_changed.subject = self.song
        self._reassign_scenes()
    def set_scene_launch_buttons(self, buttons):
        # izip_longest pads the shorter sequence with None.
        for scene, button in izip_longest(self._scenes, buttons or []):
            scene.set_launch_button(button)
    def set_scene_color_controls(self, controls):
        # Same None-padding pattern as the launch buttons above.
        for scene, control in izip_longest(self._scenes, controls or []):
            scene.scene_color_control.set_control_element(control)
    @listens(u'offset')
    def __on_offsets_changed(self, *a):
        # The ring moved: remap visible scenes (only while enabled).
        if self.is_enabled():
            self._reassign_scenes()
    @listens(u'scenes')
    def __on_scene_list_changed(self):
        # Scenes were added/removed in the Live set.
        self._reassign_scenes()
    def _reassign_scenes(self):
        """Point each component at the song scene under the current offset,
        or at None when the offset runs past the end of the scene list."""
        scenes = self.song.scenes
        for index, scene in enumerate(self._scenes):
            scene_index = self._session_ring.scene_offset + index
            scene.set_scene(scenes[scene_index] if len(scenes) > scene_index else None)
| [
"aumhaa@gmail.com"
] | aumhaa@gmail.com |
1883d5b5017f1427e94dafcc76d72300f1e7aa23 | 56f6d83f169b75cfc83f40d50c59b14d1521fd64 | /7.爬虫/day7/ITCAST/ITCAST/settings.py | ef68056fa5b7d323c01bdc9ff5761b322913fe3d | [] | no_license | songaiwen/information_29_01 | e22b8442e448a351057c6b9a7b81ede570b43e4a | e3c4cca8c05149720430ac0779081cc2aff63db6 | refs/heads/master | 2020-03-18T11:01:48.996804 | 2018-08-25T06:47:24 | 2018-08-25T06:47:24 | 134,553,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,067 | py | # -*- coding: utf-8 -*-
# Scrapy settings for ITCAST project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://doc.scrapy.org/en/latest/topics/settings.html
# https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'ITCAST'
SPIDER_MODULES = ['ITCAST.spiders']
NEWSPIDER_MODULE = 'ITCAST.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
USER_AGENT = 'ITCAST (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'ITCAST.middlewares.ItcastSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'ITCAST.middlewares.ItcastDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
'ITCAST.pipelines.ItcastPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"576883213@qq.com"
] | 576883213@qq.com |
1e40ba7fc6198751838b104388ecf529c831ea09 | 52ec517d8a990120b0e807e0c8f76056672c4ee1 | /python/hashing/internationalPhoneBookDynamicArray.py | 465c1d7268ceca51aa3b7302a0ee1413cb5baa86 | [] | no_license | macoto35/Data_Structures_Fundamentals | aaae053d270b6cd5472d755354b4b15d336c3c06 | f4cb82380f3b807889365514439b1a83afa07035 | refs/heads/master | 2020-03-20T20:41:15.437091 | 2019-08-20T07:10:27 | 2019-08-20T07:10:27 | 137,700,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,850 | py | import re
class InternationalPhoneBookDynamicArray:
    """Phone book mapping phone-number strings to names.

    Entries live in a hand-managed dynamic array (capacity doubles when
    full), kept sorted by the numeric value of each number's digits so
    lookups can use binary search.  ``getName`` only matches the exact
    number string that was stored.

    Fixes over the original: ``_getInt`` now returns an ``int`` (the old
    string return made the binary search compare lexicographically, so
    "99" sorted after "100"); ``setName``'s shift loop now steps backwards
    (the old ``range(size - 1, idx - 1)`` was empty, clobbering an entry
    instead of shifting); ``getName`` on an empty book returns None instead
    of crashing on ``arr[-1]``.
    """
    def __init__(self):
        self.arr = [None] * 1   # backing store; slots >= size are unused
        self.size = 0           # number of stored entries
        self.maxSize = 1        # current capacity of ``arr``

    class Pair:
        """A single (number, name) entry."""
        def __init__(self, number, name):
            self.number = number
            self.name = name

    def _getInt(self, number):
        """Return the numeric key for *number* (all non-digit characters stripped)."""
        return int(re.sub(r'\D', '', number))

    def _binarySearch(self, key):
        """Return the leftmost index at which *key* could be inserted."""
        lo, hi = 0, self.size
        while lo < hi:
            mid = (lo + hi) // 2
            if self._getInt(self.arr[mid].number) < key:
                lo = mid + 1
            else:
                hi = mid
        return lo

    def setName(self, number, name):
        """Insert a (number, name) entry, keeping the array sorted by digits."""
        if self.size == self.maxSize:
            # Amortized O(1) growth: double the capacity and copy over.
            self.maxSize *= 2
            newArr = [None] * self.maxSize
            newArr[:self.size] = self.arr[:self.size]
            self.arr = newArr
        idx = self._binarySearch(self._getInt(number))
        # Shift the tail one slot right, iterating from the end so no
        # element is overwritten before it has been moved.
        for i in range(self.size, idx, -1):
            self.arr[i] = self.arr[i - 1]
        self.arr[idx] = self.Pair(number, name)
        self.size += 1

    def getName(self, number):
        """Return the name stored under the exact *number* string, else None."""
        if self.size == 0:
            return None
        idx = self._binarySearch(self._getInt(number))
        if idx < self.size and self.arr[idx].number == number:
            return self.arr[idx].name
        return None
| [
"sohee.um@mercer.com"
] | sohee.um@mercer.com |
072672d8fa7bf5988279befb5a3ba45eff6aafeb | 6a0a634265957e9dcd26bc80e3304e107fb004d0 | /venvflask/lib/python3.7/site-packages/Crypto/Cipher/AES.pyi | 159c4af2e9ea3f88a1f25e4b80981d6dce07c073 | [] | no_license | ogutiann/PythonEthereumSmartContracts | 8bd81aa14eab567d41b5dad74b67aba92a405ebd | d870e9fd1c7f68b8493db4c2b2af224f966d8e51 | refs/heads/master | 2023-01-04T14:23:12.396898 | 2020-10-29T12:12:46 | 2020-10-29T12:12:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,336 | pyi | from typing import Union, Tuple, Optional, Dict
from Crypto.Cipher._mode_ecb import EcbMode
from Crypto.Cipher._mode_cbc import CbcMode
from Crypto.Cipher._mode_cfb import CfbMode
from Crypto.Cipher._mode_ofb import OfbMode
from Crypto.Cipher._mode_ctr import CtrMode
from Crypto.Cipher._mode_openpgp import OpenPgpMode
from Crypto.Cipher._mode_ccm import CcmMode
from Crypto.Cipher._mode_eax import EaxMode
from Crypto.Cipher._mode_gcm import GcmMode
from Crypto.Cipher._mode_siv import SivMode
from Crypto.Cipher._mode_ocb import OcbMode
AESMode = int
MODE_ECB: AESMode
MODE_CBC: AESMode
MODE_CFB: AESMode
MODE_OFB: AESMode
MODE_CTR: AESMode
MODE_OPENPGP: AESMode
MODE_CCM: AESMode
MODE_EAX: AESMode
MODE_GCM: AESMode
MODE_SIV: AESMode
MODE_OCB: AESMode
Buffer = Union[bytes, bytearray, memoryview]
def new(key: Buffer,
mode: AESMode,
iv: Buffer = ...,
IV: Buffer = ...,
nonce: Buffer = ...,
segment_size: int = ...,
mac_len: int = ...,
assoc_len: int = ...,
initial_value: Union[int, Buffer] = ...,
counter: Dict = ...,
use_aesni: bool = ...) -> \
Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode,
OpenPgpMode, CcmMode, EaxMode, GcmMode,
SivMode, OcbMode]: ...
block_size: int
key_size: Tuple[int, int, int]
| [
"sijoythomas@pop-os.localdomain"
] | sijoythomas@pop-os.localdomain |
48603d6812cdc33953ca8edc661c65f26addb1f2 | dd3bbd4e7aaee7a8a5f26b927ce28ac472c855a5 | /eggs/Products.PluginRegistry-1.3b1-py2.7.egg/Products/PluginRegistry/exportimport.py | 0cf253c067eca3dc9ba6c34b21277a858a5aaa2e | [] | no_license | nacho22martin/tesis | ea0a822f8bdbdef6f13f41276ecd4d6e85427ca5 | e137eb6225cc5e724bee74a892567796166134ac | refs/heads/master | 2020-12-24T13:20:58.334839 | 2013-11-09T12:42:41 | 2013-11-09T12:42:41 | 14,261,570 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,876 | py | ##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors
# Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this
# distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
""" GenericSetup export / import support for PluginRegistry.
$Id: exportimport.py 110543 2010-04-06 03:23:52Z tseaver $
"""
from StringIO import StringIO
from Persistence import PersistentMapping
from zope.interface import implements
from Products.GenericSetup.interfaces import IFilesystemExporter
from Products.GenericSetup.interfaces import IFilesystemImporter
from Products.GenericSetup.content import FauxDAVRequest
from Products.GenericSetup.content import FauxDAVResponse
from Products.GenericSetup.utils import ExportConfiguratorBase
from Products.GenericSetup.utils import ImportConfiguratorBase
from Products.GenericSetup.utils import _getDottedName
from Products.GenericSetup.utils import _resolveDottedName
from Products.GenericSetup.utils import CONVERTER
from Products.GenericSetup.utils import DEFAULT
from Products.GenericSetup.utils import KEY
from Products.PageTemplates.PageTemplateFile import PageTemplateFile
from interfaces import IPluginRegistry
def _providedBy(obj, iface):
    # Thin wrapper around zope.interface's ``iface.providedBy(obj)``.
    return iface.providedBy(obj)
_FILENAME = 'pluginregistry.xml'
def _getRegistry(site):
    """Return the single IPluginRegistry object contained in *site*.

    Raises ValueError when the site holds zero or more than one registry.
    """
    found = []
    for candidate in site.objectValues():
        if _providedBy(candidate, IPluginRegistry):
            found.append(candidate)
    count = len(found)
    if count < 1:
        raise ValueError('No plugin registries')
    if count > 1:
        raise ValueError('Too many plugin registries')
    return found[0]
def exportPluginRegistry(context):
    """ Export plugin registry as an XML file.
    o Designed for use as a GenericSetup export step.
    """
    registry = _getRegistry(context.getSite())
    # ``__of__`` wraps the exporter in the registry's Zope acquisition
    # context before rendering.
    pre = PluginRegistryExporter(registry).__of__(registry)
    xml = pre.generateXML()
    context.writeDataFile(_FILENAME, xml, 'text/xml')
    return 'Plugin registry exported.'
def _updatePluginRegistry(registry, xml, should_purge, encoding=None):
    """Apply plugin-registry settings parsed from *xml* to *registry*.

    With *should_purge*, existing plugin types / activation lists are
    dropped first; otherwise the parsed entries are merged in.
    """
    if should_purge:
        registry._plugin_types = []
        registry._plugin_type_info = PersistentMapping()
        registry._plugins = PersistentMapping()
    # When PAS import is used in an extension profile, the plugin
    # registry will have been deleted (content import deletes by
    # default) but should_purge will be false; need to initialize
    # _plugins since PluginRegistry's constructor doesn't
    if registry._plugins is None:
        registry._plugins = PersistentMapping()
    pir = PluginRegistryImporter(registry, encoding)
    reg_info = pir.parseXML(xml)
    for info in reg_info['plugin_types']:
        # Interfaces are stored as dotted names in the XML; resolve back
        # to the actual interface object.
        iface = _resolveDottedName(info['interface'])
        # Avoid duplicate plugin types
        if iface not in registry._plugin_types:
            registry._plugin_types.append(iface)
        registry._plugin_type_info[iface] = {'id': info['id'],
                                             'title': info['title'],
                                             'description': info['description'],
                                             }
        # Active plugin ids for this type, in XML document order.
        registry._plugins[iface] = tuple([x['id'] for x in info['plugins']])
def importPluginRegistry(context):
    """ Import plugin registry from an XML file.
    o Designed for use as a GenericSetup import step.
    """
    registry = _getRegistry(context.getSite())
    encoding = context.getEncoding()
    xml = context.readDataFile(_FILENAME)
    if xml is None:
        # Fixed copy/paste from another import step: this handler is for
        # the plugin registry, not "Site properties".
        return 'Plugin registry: Nothing to import.'
    _updatePluginRegistry(registry, xml, context.shouldPurge(), encoding)
    return 'Plugin registry imported.'
class PluginRegistryExporter(ExportConfiguratorBase):
    """Renders a PluginRegistry to XML via the ``pirExport.xml`` template."""
    def __init__(self, context, encoding=None):
        ExportConfiguratorBase.__init__(self, None, encoding)
        self.context = context
    def _getExportTemplate(self):
        # Page template used by the base class's generateXML().
        return PageTemplateFile('xml/pirExport.xml', globals())
    def listPluginTypes(self):
        """Yield plugin-type info dicts, with the interface replaced by its
        dotted name and the ids of the plugins active for that type added."""
        for info in self.context.listPluginTypeInfo():
            iface = info['interface']
            info['interface'] = _getDottedName(iface)
            info['plugins'] = self.context.listPluginIds(iface)
            yield info
class PluginRegistryImporter(ImportConfiguratorBase):
    """Parses ``pluginregistry.xml`` into nested dicts (see the mapping)."""
    def __init__(self, context, encoding=None):
        ImportConfiguratorBase.__init__(self, None, encoding)
        self.context = context
    def _getImportMapping(self):
        # Maps XML element / attribute names onto the dict structure that
        # parseXML returns: plugin-registry -> plugin_types -> plugins.
        return {
          'plugin-registry':
            {'plugin-type': {KEY: 'plugin_types', DEFAULT: ()},
            },
          'plugin-type':
            {'id': {KEY: 'id'},
             'interface': {KEY: 'interface'},
             'title': {KEY: 'title'},
             'description': {KEY: 'description'},
             'plugin': {KEY: 'plugins', DEFAULT: ()}
            },
          'plugin':
            {'id': {KEY: 'id'},
            },
         }
class PluginRegistryFileExportImportAdapter(object):
    """ Designed for use when exporting / importing PR's within a container.
    """
    implements(IFilesystemExporter, IFilesystemImporter)

    def __init__(self, context):
        self.context = context

    def export(self, export_context, subdir, root=False):
        """ See IFilesystemExporter.
        """
        context = self.context
        # Acquisition-wrap the exporter (Zope ``__of__``) before rendering.
        pre = PluginRegistryExporter(context).__of__(context)
        xml = pre.generateXML()
        export_context.writeDataFile(_FILENAME,
                                     xml,
                                     'text/xml',
                                     subdir,
                                    )

    def listExportableItems(self):
        """ See IFilesystemExporter.
        """
        # A plugin registry has no exportable sub-items.
        return ()

    def import_(self, import_context, subdir, root=False):
        """ See IFilesystemImporter.
        """
        data = import_context.readDataFile(_FILENAME, subdir)
        if data is None:
            import_context.note('SGAIFA',
                                'no pluginregistry.xml in %s' % subdir)
        else:
            # The FauxDAVRequest / FauxDAVResponse objects the original
            # built here were never used; the registry is updated directly.
            _updatePluginRegistry(self.context,
                                  data,
                                  import_context.shouldPurge(),
                                  import_context.getEncoding(),
                                  )
"ignacio@plone.(none)"
] | ignacio@plone.(none) |
9f0dd34ffaa2659b3d64db3c510cf958661ce188 | 6086817f6614063ac3c4ce148a3737820cca0d44 | /tests/test_python_library.py | fdcef75a8f7f509ce4f5bb381c2a2b150c4a679b | [
"Apache-2.0"
] | permissive | Tryweirder/synthtool | 0a42a5e5e60bbe6362e4e3c489a2a29b52d9b9eb | 082e1ca0863b13ada8594fe91845380765da5b70 | refs/heads/master | 2023-04-03T03:51:38.260575 | 2021-04-13T00:00:05 | 2021-04-13T00:00:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,828 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from pathlib import Path
import pytest
from synthtool import gcp
from synthtool.sources import templates
PYTHON_LIBRARY = Path(__file__).parent.parent / "synthtool/gcp/templates/python_library"
@pytest.mark.parametrize(
    ["template_kwargs", "expected_text"],
    [
        ({}, ["import nox", 'session.install("-e", ".", "-c", constraints_path)']),
        (
            {"unit_test_local_dependencies": ["../testutils", "../unitutils"]},
            [
                'session.install("-e", "../testutils", "-c", constraints_path)',
                'session.install("-e", "../unitutils", "-c", constraints_path)',
            ],
        ),
        (
            {"system_test_local_dependencies": ["../testutils", "../sysutils"]},
            [
                'session.install("-e", "../testutils", "-c", constraints_path)',
                'session.install("-e", "../sysutils", "-c", constraints_path)',
            ],
        ),
        (
            {"unit_test_extras": ["abc", "def"]},
            ['session.install("-e", ".[abc,def]", "-c", constraints_path)'],
        ),
        (
            {"system_test_extras": ["abc", "def"]},
            ['session.install("-e", ".[abc,def]", "-c", constraints_path)'],
        ),
        (
            {"unit_test_extras_by_python": {"3.8": ["abc", "def"]}},
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = ""',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
        (
            {"system_test_extras_by_python": {"3.8": ["abc", "def"]}},
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = ""',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
        (
            {
                "unit_test_extras": ["tuv", "wxyz"],
                "unit_test_extras_by_python": {"3.8": ["abc", "def"]},
            },
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = "[tuv,wxyz]"',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
        (
            {
                "system_test_extras": ["tuv", "wxyz"],
                "system_test_extras_by_python": {"3.8": ["abc", "def"]},
            },
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = "[tuv,wxyz]"',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
    ],
)
def test_library_noxfile(template_kwargs, expected_text):
    """Render noxfile.py.j2 with the given kwargs and check key lines appear."""
    t = templates.Templates(PYTHON_LIBRARY)
    result = t.render("noxfile.py.j2", **template_kwargs).read_text()
    # Validate Python syntax of the rendered noxfile.
    result_code = compile(result, "noxfile.py", "exec")
    assert result_code is not None
    for expected in expected_text:
        assert expected in result
def test_python_library():
    """py_library() should emit the docs-related Kokoro config files."""
    os.chdir(Path(__file__).parent / "fixtures/python_library")
    template_dir = Path(__file__).parent.parent / "synthtool/gcp/templates"
    common = gcp.CommonTemplates(template_path=template_dir)
    templated_files = common.py_library()
    assert os.path.exists(templated_files / ".kokoro/docs/docs-presubmit.cfg")
    assert os.path.exists(templated_files / ".kokoro/docker/docs/fetch_gpg_keys.sh")
def test_split_system_tests():
    """With split_system_tests=True, system tests move to their own Kokoro jobs."""
    os.chdir(Path(__file__).parent / "fixtures/python_library")
    template_dir = Path(__file__).parent.parent / "synthtool/gcp/templates"
    common = gcp.CommonTemplates(template_path=template_dir)
    templated_files = common.py_library(split_system_tests=True)
    # System tests must be disabled in the main presubmit job...
    with open(templated_files / ".kokoro/presubmit/presubmit.cfg", "r") as f:
        contents = f.read()
        assert "RUN_SYSTEM_TESTS" in contents
        assert "false" in contents
    # ...and a dedicated per-Python-version system-test job must exist.
    assert os.path.exists(templated_files / ".kokoro/presubmit/system-3.8.cfg")
    with open(templated_files / ".kokoro/presubmit/system-3.8.cfg", "r") as f:
        contents = f.read()
        assert "system-3.8" in contents
| [
"noreply@github.com"
] | Tryweirder.noreply@github.com |
271210fe7b439920554148561f2c82e2d4a01235 | 3bde5908bc285abb545be0f8dc7fe698fed908ba | /Dag1/live_koding/variabler.py | 90b1202e01df2749bb8d0c9f2e246068706bd688 | [] | no_license | kodeskolen/inspiria_h21 | cdac18e9ea45845b2f5c0d820f17f7c68e2cb222 | bd7070add6675754962632f58b01bcce8b847fdc | refs/heads/main | 2023-07-13T01:56:01.118767 | 2021-08-20T07:00:51 | 2021-08-20T07:00:51 | 371,284,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 18 07:29:44 2021
@author: Marie
"""
navn = "Marie"
print("Hei", navn)
x = 5
y = 2
resultat = x % y
print(resultat) | [
"roald.marie@gmail.com"
] | roald.marie@gmail.com |
5395949eb11a74f63a417d29d597799633103517 | 91f30c829664ff409177e83776c9f4e2e98d9fc4 | /manage.py | 90058a6e64433bcd765b52dfcbb015079c97e687 | [] | no_license | TotalityHacks/madras | 3ac92dc6caf989efcb02590f6474ab333d1f93fa | 2395a703eed1a87cca3cdd6c0fb9162b69e8df27 | refs/heads/master | 2021-08-17T15:29:41.055074 | 2018-07-18T23:05:29 | 2018-07-18T23:05:29 | 105,232,414 | 4 | 5 | null | 2021-03-31T18:58:56 | 2017-09-29T05:13:41 | Python | UTF-8 | Python | false | false | 249 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at the project settings before dispatching the CLI command.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madras.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| [
"jreinstra@gmail.com"
] | jreinstra@gmail.com |
ee0ff1e851be11b5b0aeee46e55a244ad3a33dbc | fdb9bdc6c4ab2f14ba71e544493706d5e275899f | /fhir/resources/requirements.py | 877404c36a97cba120a04ba6d471c45f4576175f | [
"BSD-3-Clause"
] | permissive | nazrulworld/fhir.resources | 6ae8aea8180c611b0c5050759c6dcdf63e4cb061 | 1fd6ea476b27b3fcb8c4ef8f23bc51cf161e69e3 | refs/heads/main | 2023-08-30T18:27:27.277249 | 2023-07-03T19:57:06 | 2023-07-03T19:57:06 | 165,297,877 | 256 | 83 | NOASSERTION | 2023-08-24T15:34:05 | 2019-01-11T19:26:41 | Python | UTF-8 | Python | false | false | 29,907 | py | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Requirements
Release: R5
Version: 5.0.0
Build ID: 2aecd53
Last updated: 2023-03-26T15:21:02.749+11:00
"""
import typing
from pydantic import Field, root_validator
from pydantic.error_wrappers import ErrorWrapper, ValidationError
from pydantic.errors import MissingError, NoneIsNotAllowedError
from . import backboneelement, domainresource, fhirtypes
class Requirements(domainresource.DomainResource):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.

    A set of requirements - features of systems that are necessary.
    A set of requirements - a list of features or behaviors of designed systems
    that are necessary to achieve organizational or regulatory goals.
    """

    # Pinned FHIR resourceType discriminator for this model.
    resource_type = Field("Requirements", const=True)

    actor: typing.List[typing.Optional[fhirtypes.Canonical]] = Field(
        None,
        alias="actor",
        title="Actor for these requirements",
        description="An actor these requirements are in regard to.",
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["ActorDefinition"],
    )
    actor__ext: typing.List[
        typing.Union[fhirtypes.FHIRPrimitiveExtensionType, None]
    ] = Field(None, alias="_actor", title="Extension field for ``actor``.")

    contact: typing.List[fhirtypes.ContactDetailType] = Field(
        None,
        alias="contact",
        title="Contact details for the publisher",
        description=(
            "Contact details to assist a user in finding and communicating with the"
            " publisher."
        ),
        # if property is element of this resource.
        element_property=True,
    )

    copyright: fhirtypes.Markdown = Field(
        None,
        alias="copyright",
        title="Use and/or publishing restrictions",
        description=(
            "A copyright statement relating to the Requirements and/or its "
            "contents. Copyright statements are generally legal restrictions on the"
            " use and publishing of the Requirements."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    copyright__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_copyright", title="Extension field for ``copyright``."
    )

    copyrightLabel: fhirtypes.String = Field(
        None,
        alias="copyrightLabel",
        title="Copyright holder and year(s)",
        description=(
            "A short string (<50 characters), suitable for inclusion in a page "
            "footer that identifies the copyright holder, effective period, and "
            "optionally whether rights are resctricted. (e.g. 'All rights "
            "reserved', 'Some rights reserved')."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    copyrightLabel__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_copyrightLabel", title="Extension field for ``copyrightLabel``."
    )

    date: fhirtypes.DateTime = Field(
        None,
        alias="date",
        title="Date last changed",
        description=(
            "The date (and optionally time) when the Requirements was published. "
            "The date must change when the business version changes and it must "
            "change if the status code changes. In addition, it should change when "
            "the substantive content of the Requirements changes."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    date__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_date", title="Extension field for ``date``."
    )

    derivedFrom: typing.List[typing.Optional[fhirtypes.Canonical]] = Field(
        None,
        alias="derivedFrom",
        title="Other set of Requirements this builds on",
        description=(
            "Another set of Requirements that this set of Requirements builds on "
            "and updates."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Requirements"],
    )
    derivedFrom__ext: typing.List[
        typing.Union[fhirtypes.FHIRPrimitiveExtensionType, None]
    ] = Field(None, alias="_derivedFrom", title="Extension field for ``derivedFrom``.")

    description: fhirtypes.Markdown = Field(
        None,
        alias="description",
        title="Natural language description of the requirements",
        description="A free text natural language description of the requirements.",
        # if property is element of this resource.
        element_property=True,
    )
    description__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_description", title="Extension field for ``description``."
    )

    experimental: bool = Field(
        None,
        alias="experimental",
        title="For testing purposes, not real usage",
        description=(
            "A Boolean value to indicate that this Requirements is authored for "
            "testing purposes (or education/evaluation/marketing) and is not "
            "intended to be used for genuine usage."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    experimental__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_experimental", title="Extension field for ``experimental``."
    )

    identifier: typing.List[fhirtypes.IdentifierType] = Field(
        None,
        alias="identifier",
        title="Additional identifier for the Requirements (business identifier)",
        description=(
            "A formal identifier that is used to identify this Requirements when it"
            " is represented in other formats, or referenced in a specification, "
            "model, design or an instance."
        ),
        # if property is element of this resource.
        element_property=True,
    )

    jurisdiction: typing.List[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="jurisdiction",
        title="Intended jurisdiction for Requirements (if applicable)",
        description=(
            "A legal or geographic region in which the Requirements is intended to "
            "be used."
        ),
        # if property is element of this resource.
        element_property=True,
    )

    name: fhirtypes.String = Field(
        None,
        alias="name",
        title="Name for this Requirements (computer friendly)",
        description=(
            "A natural language name identifying the Requirements. This name should"
            " be usable as an identifier for the module by machine processing "
            "applications such as code generation."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    name__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_name", title="Extension field for ``name``."
    )

    publisher: fhirtypes.String = Field(
        None,
        alias="publisher",
        title="Name of the publisher/steward (organization or individual)",
        description=(
            "The name of the organization or individual responsible for the release"
            " and ongoing maintenance of the Requirements."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    publisher__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_publisher", title="Extension field for ``publisher``."
    )

    purpose: fhirtypes.Markdown = Field(
        None,
        alias="purpose",
        title="Why this Requirements is defined",
        description=(
            "Explanation of why this Requirements is needed and why it has been "
            "designed as it has."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    purpose__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_purpose", title="Extension field for ``purpose``."
    )

    reference: typing.List[typing.Optional[fhirtypes.Url]] = Field(
        None,
        alias="reference",
        title=(
            "External artifact (rule/document etc. that) created this set of "
            "requirements"
        ),
        description=(
            "A reference to another artifact that created this set of requirements."
            " This could be a Profile, etc., or external regulation, or business "
            "requirements expressed elsewhere."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    reference__ext: typing.List[
        typing.Union[fhirtypes.FHIRPrimitiveExtensionType, None]
    ] = Field(None, alias="_reference", title="Extension field for ``reference``.")

    statement: typing.List[fhirtypes.RequirementsStatementType] = Field(
        None,
        alias="statement",
        title="Actual statement as markdown",
        description="The actual statement of requirement, in markdown format.",
        # if property is element of this resource.
        element_property=True,
    )

    status: fhirtypes.Code = Field(
        None,
        alias="status",
        title="draft | active | retired | unknown",
        description=(
            "The status of this Requirements. Enables tracking the life-cycle of "
            "the content."
        ),
        # if property is element of this resource.
        element_property=True,
        element_required=True,
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["draft", "active", "retired", "unknown"],
    )
    status__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_status", title="Extension field for ``status``."
    )

    title: fhirtypes.String = Field(
        None,
        alias="title",
        title="Name for this Requirements (human friendly)",
        description="A short, descriptive, user-friendly title for the Requirements.",
        # if property is element of this resource.
        element_property=True,
    )
    title__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_title", title="Extension field for ``title``."
    )

    url: fhirtypes.Uri = Field(
        None,
        alias="url",
        title=(
            "Canonical identifier for this Requirements, represented as a URI "
            "(globally unique)"
        ),
        description=(
            "An absolute URI that is used to identify this Requirements when it is "
            "referenced in a specification, model, design or an instance; also "
            "called its canonical identifier. This SHOULD be globally unique and "
            "SHOULD be a literal address at which an authoritative instance of this"
            " Requirements is (or will be) published. This URL can be the target of"
            " a canonical reference. It SHALL remain the same when the Requirements"
            " is stored on different servers."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    url__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_url", title="Extension field for ``url``."
    )

    useContext: typing.List[fhirtypes.UsageContextType] = Field(
        None,
        alias="useContext",
        title="The context that the content is intended to support",
        description=(
            "The content was developed with a focus and intent of supporting the "
            "contexts that are listed. These contexts may be general categories "
            "(gender, age, ...) or may be references to specific programs "
            "(insurance plans, studies, ...) and may be used to assist with "
            "indexing and searching for appropriate Requirements instances."
        ),
        # if property is element of this resource.
        element_property=True,
    )

    version: fhirtypes.String = Field(
        None,
        alias="version",
        title="Business version of the Requirements",
        description=(
            "The identifier that is used to identify this version of the "
            "Requirements when it is referenced in a specification, model, design "
            "or instance. This is an arbitrary value managed by the Requirements "
            "author and is not expected to be globally unique. For example, it "
            "might be a timestamp (e.g. yyyymmdd) if a managed version is not "
            "available. There is also no expectation that versions can be placed in"
            " a lexicographical sequence."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    version__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_version", title="Extension field for ``version``."
    )

    versionAlgorithmCoding: fhirtypes.CodingType = Field(
        None,
        alias="versionAlgorithmCoding",
        title="How to compare versions",
        description=(
            "Indicates the mechanism used to compare versions to determine which is"
            " more current."
        ),
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e versionAlgorithm[x]
        one_of_many="versionAlgorithm",
        one_of_many_required=False,
    )

    versionAlgorithmString: fhirtypes.String = Field(
        None,
        alias="versionAlgorithmString",
        title="How to compare versions",
        description=(
            "Indicates the mechanism used to compare versions to determine which is"
            " more current."
        ),
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e versionAlgorithm[x]
        one_of_many="versionAlgorithm",
        one_of_many_required=False,
    )
    versionAlgorithmString__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None,
        alias="_versionAlgorithmString",
        title="Extension field for ``versionAlgorithmString``.",
    )

    @classmethod
    def elements_sequence(cls):
        """returning all elements names from
        ``Requirements`` according specification,
        with preserving original sequence order.
        """
        return [
            "id",
            "meta",
            "implicitRules",
            "language",
            "text",
            "contained",
            "extension",
            "modifierExtension",
            "url",
            "identifier",
            "version",
            "versionAlgorithmString",
            "versionAlgorithmCoding",
            "name",
            "title",
            "status",
            "experimental",
            "date",
            "publisher",
            "contact",
            "description",
            "useContext",
            "jurisdiction",
            "purpose",
            "copyright",
            "copyrightLabel",
            "derivedFrom",
            "reference",
            "actor",
            "statement",
        ]

    @root_validator(pre=True, allow_reuse=True)
    def validate_required_primitive_elements_1481(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/extensibility.html#Special-Case
        In some cases, implementers might find that they do not have appropriate data for
        an element with minimum cardinality = 1. In this case, the element must be present,
        but unless the resource or a profile on it has made the actual value of the primitive
        data type mandatory, it is possible to provide an extension that explains why
        the primitive value is not present.
        """
        required_fields = [("status", "status__ext")]
        # Sentinel distinguishing "absent" from an explicit None value.
        _missing = object()

        def _fallback():
            return ""

        errors: typing.List["ErrorWrapper"] = []
        for name, ext in required_fields:
            field = cls.__fields__[name]
            ext_field = cls.__fields__[ext]
            value = values.get(field.alias, _missing)
            if value not in (_missing, None):
                continue
            ext_value = values.get(ext_field.alias, _missing)
            missing_ext = True
            if ext_value not in (_missing, None):
                if isinstance(ext_value, dict):
                    missing_ext = len(ext_value.get("extension", [])) == 0
                elif (
                    getattr(ext_value.__class__, "get_resource_type", _fallback)()
                    == "FHIRPrimitiveExtension"
                ):
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
                else:
                    validate_pass = True
                    for validator in ext_field.type_.__get_validators__():
                        try:
                            ext_value = validator(v=ext_value)
                        except ValidationError as exc:
                            errors.append(ErrorWrapper(exc, loc=ext_field.alias))
                            validate_pass = False
                    if not validate_pass:
                        continue
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
            if missing_ext:
                if value is _missing:
                    errors.append(ErrorWrapper(MissingError(), loc=field.alias))
                else:
                    errors.append(
                        ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias)
                    )
        if len(errors) > 0:
            raise ValidationError(errors, cls)  # type: ignore

        return values

    @root_validator(pre=True, allow_reuse=True)
    def validate_one_of_many_1481(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/formats.html#choice
        A few elements have a choice of more than one data type for their content.
        All such elements have a name that takes the form nnn[x].
        The "nnn" part of the name is constant, and the "[x]" is replaced with
        the title-cased name of the type that is actually used.
        The table view shows each of these names explicitly.

        Elements that have a choice of data type cannot repeat - they must have a
        maximum cardinality of 1. When constructing an instance of an element with a
        choice of types, the authoring system must create a single element with a
        data type chosen from among the list of permitted data types.
        """
        one_of_many_fields = {
            "versionAlgorithm": ["versionAlgorithmCoding", "versionAlgorithmString"]
        }
        for prefix, fields in one_of_many_fields.items():
            assert cls.__fields__[fields[0]].field_info.extra["one_of_many"] == prefix
            required = (
                cls.__fields__[fields[0]].field_info.extra["one_of_many_required"]
                is True
            )
            found = False
            for field in fields:
                if field in values and values[field] is not None:
                    if found is True:
                        raise ValueError(
                            "Any of one field value is expected from "
                            f"this list {fields}, but got multiple!"
                        )
                    else:
                        found = True
            if required is True and found is False:
                raise ValueError(f"Expect any of field value from this list {fields}.")

        return values
class RequirementsStatement(backboneelement.BackboneElement):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.

    Actual statement as markdown.
    The actual statement of requirement, in markdown format.
    """

    # Pinned FHIR element type discriminator for this backbone element.
    resource_type = Field("RequirementsStatement", const=True)

    conditionality: bool = Field(
        None,
        alias="conditionality",
        title="Set to true if requirements statement is conditional",
        description=(
            "This boolean flag is set to true of the text of the requirement is "
            "conditional on something e.g. it includes lanauage like 'if x then y'."
            " This conditionality flag is introduced for purposes of filtering and "
            "colour highlighting etc."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    conditionality__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_conditionality", title="Extension field for ``conditionality``."
    )

    conformance: typing.List[typing.Optional[fhirtypes.Code]] = Field(
        None,
        alias="conformance",
        title="SHALL | SHOULD | MAY | SHOULD-NOT",
        # NOTE(review): this description duplicates ``label``'s; it mirrors the
        # generated FHIR R5 spec text -- confirm against the spec before changing.
        description="A short human usable label for this statement.",
        # if property is element of this resource.
        element_property=True,
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["SHALL", "SHOULD", "MAY", "SHOULD-NOT"],
    )
    conformance__ext: typing.List[
        typing.Union[fhirtypes.FHIRPrimitiveExtensionType, None]
    ] = Field(None, alias="_conformance", title="Extension field for ``conformance``.")

    derivedFrom: fhirtypes.String = Field(
        None,
        alias="derivedFrom",
        title="Another statement this clarifies/restricts ([url#]key)",
        description=(
            "Another statement on one of the requirements that this requirement "
            "clarifies or restricts."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    derivedFrom__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_derivedFrom", title="Extension field for ``derivedFrom``."
    )

    key: fhirtypes.Id = Field(
        None,
        alias="key",
        title="Key that identifies this statement",
        description="Key that identifies this statement (unique within this resource).",
        # if property is element of this resource.
        element_property=True,
        element_required=True,
    )
    key__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_key", title="Extension field for ``key``."
    )

    label: fhirtypes.String = Field(
        None,
        alias="label",
        title="Short Human label for this statement",
        description="A short human usable label for this statement.",
        # if property is element of this resource.
        element_property=True,
    )
    label__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_label", title="Extension field for ``label``."
    )

    parent: fhirtypes.String = Field(
        None,
        alias="parent",
        title="A larger requirement that this requirement helps to refine and enable",
        description=None,
        # if property is element of this resource.
        element_property=True,
    )
    parent__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_parent", title="Extension field for ``parent``."
    )

    reference: typing.List[typing.Optional[fhirtypes.Url]] = Field(
        None,
        alias="reference",
        title="External artifact (rule/document etc. that) created this requirement",
        description=(
            "A reference to another artifact that created this requirement. This "
            "could be a Profile, etc., or external regulation, or business "
            "requirements expressed elsewhere."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    reference__ext: typing.List[
        typing.Union[fhirtypes.FHIRPrimitiveExtensionType, None]
    ] = Field(None, alias="_reference", title="Extension field for ``reference``.")

    requirement: fhirtypes.Markdown = Field(
        None,
        alias="requirement",
        title="The actual requirement",
        description="The actual requirement for human consumption.",
        # if property is element of this resource.
        element_property=True,
        element_required=True,
    )
    requirement__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_requirement", title="Extension field for ``requirement``."
    )

    satisfiedBy: typing.List[typing.Optional[fhirtypes.Url]] = Field(
        None,
        alias="satisfiedBy",
        title="Design artifact that satisfies this requirement",
        description=(
            "A reference to another artifact that satisfies this requirement. This "
            "could be a Profile, extension, or an element in one of those, or a "
            "CapabilityStatement, OperationDefinition, SearchParameter, "
            "CodeSystem(/code), ValueSet, Libary etc."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    satisfiedBy__ext: typing.List[
        typing.Union[fhirtypes.FHIRPrimitiveExtensionType, None]
    ] = Field(None, alias="_satisfiedBy", title="Extension field for ``satisfiedBy``.")

    source: typing.List[fhirtypes.ReferenceType] = Field(
        None,
        alias="source",
        title="Who asked for this statement",
        description=(
            "Who asked for this statement to be a requirement. By default, it's "
            "assumed that the publisher knows who it is if it matters."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=[
            "CareTeam",
            "Device",
            "Group",
            "HealthcareService",
            "Organization",
            "Patient",
            "Practitioner",
            "PractitionerRole",
            "RelatedPerson",
        ],
    )

    @classmethod
    def elements_sequence(cls):
        """returning all elements names from
        ``RequirementsStatement`` according specification,
        with preserving original sequence order.
        """
        return [
            "id",
            "extension",
            "modifierExtension",
            "key",
            "label",
            "conformance",
            "conditionality",
            "requirement",
            "derivedFrom",
            "parent",
            "satisfiedBy",
            "reference",
            "source",
        ]

    @root_validator(pre=True, allow_reuse=True)
    def validate_required_primitive_elements_2431(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/extensibility.html#Special-Case
        In some cases, implementers might find that they do not have appropriate data for
        an element with minimum cardinality = 1. In this case, the element must be present,
        but unless the resource or a profile on it has made the actual value of the primitive
        data type mandatory, it is possible to provide an extension that explains why
        the primitive value is not present.
        """
        required_fields = [("key", "key__ext"), ("requirement", "requirement__ext")]
        # Sentinel distinguishing "absent" from an explicit None value.
        _missing = object()

        def _fallback():
            return ""

        errors: typing.List["ErrorWrapper"] = []
        for name, ext in required_fields:
            field = cls.__fields__[name]
            ext_field = cls.__fields__[ext]
            value = values.get(field.alias, _missing)
            if value not in (_missing, None):
                continue
            ext_value = values.get(ext_field.alias, _missing)
            missing_ext = True
            if ext_value not in (_missing, None):
                if isinstance(ext_value, dict):
                    missing_ext = len(ext_value.get("extension", [])) == 0
                elif (
                    getattr(ext_value.__class__, "get_resource_type", _fallback)()
                    == "FHIRPrimitiveExtension"
                ):
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
                else:
                    validate_pass = True
                    for validator in ext_field.type_.__get_validators__():
                        try:
                            ext_value = validator(v=ext_value)
                        except ValidationError as exc:
                            errors.append(ErrorWrapper(exc, loc=ext_field.alias))
                            validate_pass = False
                    if not validate_pass:
                        continue
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
            if missing_ext:
                if value is _missing:
                    errors.append(ErrorWrapper(MissingError(), loc=field.alias))
                else:
                    errors.append(
                        ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias)
                    )
        if len(errors) > 0:
            raise ValidationError(errors, cls)  # type: ignore

        return values
| [
"connect2nazrul@gmail.com"
] | connect2nazrul@gmail.com |
92fd5fb41a07098ce8fa2e387838af6b02e4ddb0 | e0980f704a573894350e285f66f4cf390837238e | /.history/streams/blocks_20201023163431.py | afd7b53cc53e254155bfcd350fb70f1c72afcd68 | [] | no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,824 | py | from wagtail.core import blocks
from wagtail.images.blocks import ImageChooserBlock
class TitleBlock(blocks.StructBlock):
    """Centered title text for a page section."""

    text = blocks.CharBlock(
        required=True,
        # BUG FIX: was misspelled ``elp_text``, so the help text was never applied.
        help_text='Tekst do wyświetlenia',
    )

    class Meta:
        template = 'streams/title_block.html'
        # BUG FIX: 'edycja' is not a registered Wagtail icon name; 'edit' is.
        icon = 'edit'
        label = 'Tytuł'
        help_text = 'Wyśrodkowany tekst do wyświetlenia na stronie.'
class LinkValue(blocks.StructValue):
    """Extra logic for links: resolve whichever target is set."""

    def url(self) -> str:
        """Return the internal page URL if chosen, else the external link, else ''."""
        internal_page = self.get('internal_page')
        external_link = self.get('external_link')
        if internal_page:
            return internal_page.url
        elif external_link:
            return external_link
        return ''
class Link(blocks.StructBlock):
    """A link pointing at an internal page or an external URL."""

    link_text = blocks.CharBlock(
        max_length=50,
        default='Więcej szczegółów'
    )
    # BUG FIX: was misspelled ``interal_page``; LinkValue.url() looks up
    # 'internal_page', so the chosen internal page was silently ignored.
    internal_page = blocks.PageChooserBlock(
        required=False
    )
    external_link = blocks.URLBlock(
        required=False
    )

    class Meta:
        value_class = LinkValue
class Card(blocks.StructBlock):
    """A single card: title, optional text, an image and a link."""

    title = blocks.CharBlock(
        max_length=100,
        help_text='Pogrubiony tytuł tej karty. Maksymalnie 100 znaków.'
    )
    text = blocks.TextBlock(
        max_length=255,
        # BUG FIX: the help text says this field is optional, but TextBlock
        # defaults to required=True -- make it actually optional.
        required=False,
        help_text='Opcjonalny tekst tej karty. Maksymalnie 255 znaków.'
    )
    image = ImageChooserBlock(
        help_text='Obraz zostanie automatycznie przycięty o 570 na 370 pikseli'
    )
    # BUG FIX: user-facing typo 'Wwybierz' -> 'Wybierz'.
    link = Link(help_text='Wybierz link')
class CardsBlock(blocks.StructBlock):
    """A repeatable list of standard cards."""

    cards = blocks.ListBlock(
        Card()
    )

    class Meta:
        template = 'streams/card_block.html'
        icon = 'image'
        label = 'Karty standardowe'
class ImageAndTextBlock(blocks.StructBlock):
image
| [
"rucinska.patrycja@gmail.com"
] | rucinska.patrycja@gmail.com |
14da2992936439bfeab57f9548a5e32508e2378d | ea1ece02e9ff9981d7a28ef1944d2dbf06110e93 | /16_find_compact/combine_several_post.py | 438816000c33195429fa8b486386a0cab0d369e3 | [] | no_license | Spritea/Hotkey | 377bf5f820b076bb21cb21ce5655e10a805b9a82 | 14df02daabc26195f8f3969e27c68bc62791f4c3 | refs/heads/master | 2022-08-27T21:11:14.597257 | 2020-05-20T15:48:07 | 2020-05-20T15:48:07 | 178,776,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,884 | py | import cv2 as cv
import numpy as np
from pathlib import Path
import natsort
from PIL import Image
from tqdm import tqdm
# 包括边边也拼起来 (also stitch the partial edge tiles into the full image)
def combine_one(imgs_list, img_path, imgwidth, imgheight):
im = Image.fromarray(imgs_list[0])
width, height = im.size
row_res = imgheight % height
col_res = imgwidth % width
img_row = int(imgheight / height) if row_res == 0 else int(imgheight / height) + 1
# every row in big image contains img_row images
img_col = int(imgwidth / width) if col_res == 0 else int(imgwidth / width) + 1
blank = Image.new("RGB", (imgwidth, imgheight))
for k in range(img_row):
for j in range(img_col):
p = Image.fromarray(imgs_list[j + k * img_col])
if j + 1 == img_col and k + 1 < img_row and col_res > 0:
box = (width - col_res, 0, width, height)
p = p.crop(box)
elif j + 1 < img_col and k + 1 == img_row and row_res > 0:
box = (0, height - row_res, width, height)
p = p.crop(box)
elif j + 1 == img_col and k + 1 == img_row and col_res > 0 and row_res > 0:
box = (width - col_res, height - row_res, width, height)
p = p.crop(box)
blank.paste(p, (width * j, height * k))
if Path(out_path_prefix).is_dir():
pass
else:
print("Out path is empty!!")
exit(0)
out_path = out_path_prefix + "\\" + img_path
blank.save(out_path)
# Postdam train18
# id_list=['2_12','2_13','2_14','3_12','3_13','3_14','4_12','4_13','4_14','4_15',
# '5_12','5_13','5_14','5_15','6_12','6_13','6_14','6_15','7_12','7_13']
# Postdam train24-benchmark
id_list=['2_13','2_14','3_13','3_14','4_13','4_14','4_15',
'5_13','5_14','5_15','6_13','6_14','6_15','7_13']
IMG_Path = Path("E:\code\hotkey\\17_post_proc\Postdam\\from-pytorch-train\\train24-val14\mv3_1_true_2_res50_data15\pred")
Large_Path = Path("E:\code\hotkey\\17_post_proc\Postdam\\from-pytorch-train\\train24-val14\\val_gt_full")
Large_File = natsort.natsorted(list(Large_Path.glob("*.tif")), alg=natsort.PATH)
Large_Str = []
for j in Large_File:
Large_Str.append(str(j))
for k in tqdm(range(len(id_list))):
glob_target='*potsdam_'+id_list[k]+'_*.png'
IMG_File = natsort.natsorted(list(IMG_Path.glob(glob_target)), alg=natsort.PATH)
IMG_Str = []
for i in IMG_File:
IMG_Str.append(str(i))
pic_small=[]
for j in range(0,len(IMG_Str)):
pic_small.append(cv.cvtColor(cv.imread(IMG_Str[j], cv.IMREAD_COLOR), cv.COLOR_BGR2RGB))
large_img=cv.imread(Large_Str[k])
height,width,_=large_img.shape
out_path_prefix = "E:\code\hotkey\\17_post_proc\Postdam\\from-pytorch-train\\train24-val14\mv3_1_true_2_res50_data15\ceshi"
out_name='potsdam_'+id_list[k]+'_pred.png'
combine_one(pic_small,out_name,width,height)
| [
"461158649@qq.com"
] | 461158649@qq.com |
4e7326d8f0782d2450e71723a4e875d33608d65f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/429/usersdata/314/103216/submittedfiles/jogoDaVelha_BIB.py | b06d282cfbb729c7fadf5f046a39ffe6c78fd471 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # -*- coding: utf-8 -*-
# Minha bib
def solicitaSimboloDoHumano ():
simboloJogador = input('Qual simbolo deseja ultilizar no jogo? ')
while (simboloJogador != 'O') and (simboloJogador != 'X'):
simboloJogador = input('Qual simbolo deseja ultilizar no jogo? ')
return simboloJogador
def mostraTabuleiro(matriz):
print(matriz[0][0] + ' | ' + matriz[0][1] + ' | ' + matriz[0][2])
print(matriz[1][0] + ' | ' + matriz[1][1] + ' | ' + matriz[1][2])
print(matriz[2][0] + ' | ' + matriz[2][1] + ' | ' + matriz[2][2])
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
5c85fc1b4bee269efa7bb0ba55af1962e69224e0 | a753cdc01eb1e31835fcafa35b33ccdf237ab343 | /manage.py | 076b376992788d06d28a9ba6f3f95b4766ba0439 | [] | no_license | nawarazpokhrel/gces_backend | 919921e5fb24864c44633ea8a92471bae56cbd96 | 38a82768dc1733724302ceb52ba76e3fe3fb9c58 | refs/heads/master | 2023-06-19T08:10:02.508244 | 2021-07-14T12:35:46 | 2021-07-14T12:35:46 | 378,985,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gces_backend.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"pokhrelnawaraz@yahoo.com"
] | pokhrelnawaraz@yahoo.com |
6bfd57cf8429b4aba8742d9762d3b57922129d71 | c7979f4f6435fe8d0d07fff7a430da55e3592aed | /ABC015/D.py | 3bfd86c65ff0d9c6196a1bb441bc21f026524029 | [] | no_license | banboooo044/AtCoder | cee87d40bb98abafde19017f4f4e2f984544b9f8 | 7541d521cf0da848ecb5eb10ffea7d75a44cbbb6 | refs/heads/master | 2020-04-14T11:35:24.977457 | 2019-09-17T03:20:27 | 2019-09-17T03:20:27 | 163,818,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | #高橋くんの苦悩
#動的計画法
# dp[i][width] := (maxValue,useNum)
if __name__ == "__main__":
INF = float('inf')
W = int(input())
N,K = map(int,input().split(" "))
dp = [[0]*(W+1) for _ in range(K+1)]
A = [0] * N
B = [0] * N
for i in range(N):
a , b= map(int,input().split(" "))
A[i] = a
B[i] = b
past_max = 0
for i in range(N,0,-1):
for useNum in range(min([N-i+1,K]),0,-1):
for width in range(W+1):
if width - A[i-1] >= 0 and useNum <= K:
dp[useNum][width] = max([dp[useNum-1][width - A[i-1]] + B[i-1],dp[useNum][width]])
past_max = max([past_max,dp[useNum][width]])
print(past_max)
| [
"touhoucrisis7@gmail.com"
] | touhoucrisis7@gmail.com |
13217c599ddbbc506b123e6abbfb3f37f5f42da3 | 8d9318a33afc2c3b5ca8ac99fce0d8544478c94a | /Books/Casandra DB/opscenter-5.1.0/lib/py-redhat/2.6/shared/i386/twisted/protocols/stateful.py | 2920700df1e71347c81753e05484b2c2b01d9b43 | [] | no_license | tushar239/git-large-repo | e30aa7b1894454bf00546312a3fb595f6dad0ed6 | 9ee51112596e5fc3a7ab2ea97a86ec6adc677162 | refs/heads/master | 2021-01-12T13:48:43.280111 | 2016-11-01T22:14:51 | 2016-11-01T22:14:51 | 69,609,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | ../../../../../../py-unpure/twisted/protocols/stateful.py | [
"tushar239@gmail.com"
] | tushar239@gmail.com |
45a2ef273b285ebb4a63dbd20557cb357d754d49 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/TMU-MIB.py | 27e8099def3337d1df4cb10910615e02d7335a47 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 127,648 | py | #
# PySNMP MIB module TMU-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TMU-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:23:46 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
internet, iso, Integer32, Gauge32, mgmt, IpAddress, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, MibIdentifier, TimeTicks, Bits, ModuleIdentity, NotificationType, ObjectIdentity, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "internet", "iso", "Integer32", "Gauge32", "mgmt", "IpAddress", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "MibIdentifier", "TimeTicks", "Bits", "ModuleIdentity", "NotificationType", "ObjectIdentity", "Counter32")
TextualConvention, MacAddress, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "MacAddress", "DisplayString")
# ---------------------------------------------------------------------------
# OID registration tree for this MIB.
# enterprises.fibronics.tmu = 1.3.6.1.4.1.22.60, with eight functional
# sub-groups (system, interface, match, stations, ports, production,
# security, function) hung beneath it.
# NOTE: pysmi re-emits `mgmt` and `private` here, shadowing the `mgmt`
# imported from SNMPv2-SMI above; the numeric OIDs are the standard ones,
# so behavior is unaffected.
# ---------------------------------------------------------------------------
mgmt = MibIdentifier((1, 3, 6, 1, 2))
private = MibIdentifier((1, 3, 6, 1, 4))
enterprises = MibIdentifier((1, 3, 6, 1, 4, 1))
fibronics = MibIdentifier((1, 3, 6, 1, 4, 1, 22))
tmu = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60))
# Sub-groups of the tmu subtree; only tmuSystem (1), tmuIntrfc (2) and
# tmuMatch (3) have objects defined in this chunk of the module.
tmuSystem = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 1))
tmuIntrfc = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 2))
tmuMatch = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 3))
tmuStations = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 4))
tmuPorts = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 5))
tmuProduction = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 6))
tmuSecurity = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 7))
tmuFunction = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 8))
fibTmuFunctRtpGrpVrsRun = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 8, 1))
fibTmuFunctRtpGrpVrsPerm = MibIdentifier((1, 3, 6, 1, 4, 1, 22, 60, 8, 2))
# ---------------------------------------------------------------------------
# tmuSystem group (1.3.6.1.4.1.22.60.1): system-wide scalars.
# Mostly read-only counters and software-version numbers, plus a handful of
# read-write control objects (debug/fault-format flags, trunk wrap relays,
# eeprom-default loading, real-time clock).  Each object follows the pysmi
# pattern: MibScalar definition, then setStatus/setDescription guarded by
# mibBuilder.loadTexts (text is only attached when the builder loads texts).
# ---------------------------------------------------------------------------
fibTmuNumEventErrs = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumEventErrs.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumEventErrs.setDescription("Cyclic counter. Indicates how many times the event-handler procedure has been called with the 'ERR' severity level or higher.")
fibTmuArpAgeTime = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuArpAgeTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuArpAgeTime.setDescription('The value of the system timer [in SYS-TICKs] at the last time ARP aging was carried out.')
fibTmuNumRarpUpdate = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRarpUpdate.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRarpUpdate.setDescription("Cyclic counter. Indicates how many times the system's IP address was updated because of a RARP message.")
# -- software/firmware version objects (.1.4 .. .1.10) ----------------------
fibTmuMainSwVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMainSwVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMainSwVersion.setDescription('United version for all software packages in this system.')
fibTmuEepromVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuEepromVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuEepromVersion.setDescription('Version of the eeprom package')
fibTmuEepromDeffective = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuEepromDeffective.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuEepromDeffective.setDescription('System information. Set when eeprom is found to be physically deffective and can not be read nor written to.')
fibTmuLastSysIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuLastSysIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuLastSysIfIndex.setDescription('Number of inteface via which the last ip message was received. Currently interfaces 1->3 are token ring interfaces and interface 4 is the serial/slip interface.')
fibTmuTrDrvVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuTrDrvVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuTrDrvVersion.setDescription('The software version of the token ring driver as received from the driver at initialization.')
fibTmuSccDrvVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuSccDrvVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuSccDrvVersion.setDescription('The software version of the serial line driver as received from the driver at initialization.')
fibTmuSnmpUsrVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuSnmpUsrVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuSnmpUsrVersion.setDescription('The software version of the sw package employing udp/ip/snmp and the preliminary reduction of messages from any of the interfaces. Loaded at initialization.')
# -- read-write control flags (enumerated INTEGER values) -------------------
fibTmuDisplayDebugMode = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("regular-mode", 1), ("special-debug-mode", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuDisplayDebugMode.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuDisplayDebugMode.setDescription('Flag indicating whether system is in a special debug mode. In that mode, extra code is executed to output indications on system state.')
fibTmuEeFaultsFormat = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("full-mode", 1), ("concise-mode", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuEeFaultsFormat.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuEeFaultsFormat.setDescription("Flag indicating whether faults stored in eeprom will each take a small eeprom space or a large eeprom space. If the 'small' option is chosen, only so many first bytes of the fault text are stored but the number of stored faults is larger. This value is stored in eeprom and becomes effective only after the next reset.")
# Read-only runtime mirror of fibTmuEeFaultsFormat (value in effect now,
# as opposed to the value that will apply after the next reset).
fibTmuRunFaultsFormat = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("full-mode", 1), ("concise-mode", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuRunFaultsFormat.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuRunFaultsFormat.setDescription('This is the run time value of flag described in eeFaultsFormat above.')
fibTmuFunctionalityVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctionalityVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctionalityVersion.setDescription('The software version of the sw package which realizes the recovery process when the token ring fails (e.g. beaconing). Loaded at initialization.')
fibTmuUtilitiesVersion = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuUtilitiesVersion.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuUtilitiesVersion.setDescription('The software version of the sw package containing operating system and hw utilities. Loaded at initialization.')
# -- trunk wrap relays: readable with four states, but per the MIB text only
# -- values 1 (no-wrap) and 2 (wrap-set-by-manager) may be written.
fibTmuWrapIn = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("no-wrap", 1), ("wrap-set-by-manager", 2), ("wrap-set-by-Tmu", 3), ("wrap-set-by-both", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuWrapIn.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuWrapIn.setDescription("Get: Read the state of the specified relay (wrap on 'ring in' port on trunk). Wrap may be set either by the manager or by the Tmu due to some errors on the trunk. Set: Only two values are allowed. 'no-wrap' indicates 'open the relay and attach the trunk' 'wrap-set-by-manager' indicates 'detach the trunk' ")
fibTmuWrapOut = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("no-wrap", 1), ("wrap-set-by-manager", 2), ("wrap-set-by-Tmu", 3), ("wrap-set-by-both", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuWrapOut.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuWrapOut.setDescription("Get: Read the state of the specified relay (wrap on 'ring Out' port on trunk). Wrap may be set either by the manager or by the Tmu due to some errors on the trunk. Set: Only two values are allowed. 'no-wrap' indicates 'open the relay and attach the trunk' 'wrap-set-by-manager' indicates 'detach the trunk' ")
fibTmuLoadEeDefaults = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-load-default", 1), ("load-defaults", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuLoadEeDefaults.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuLoadEeDefaults.setDescription("When this object is set, the Tmu system is instructed to load non volatile memory by default values at the next reset. Get: Read the value the system will use on the next reset. Set: If 'load-defults' is written then at the next system reset, non volatile ram will be loaded by default values and the object will automatically be loaded by 'dont-load-defaults'. Note that, among other things, Tmu will lose, in this case, its IP address.")
fibTmuDontResetFatal = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("reset-fatal", 1), ("dont-reset-fatal", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuDontResetFatal.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuDontResetFatal.setDescription("When this object is set, the Tmu system is instructed to reset the system when a fatal error has been detected. For debug purposes. In regular cases, this variable is set to 'reset-fatal'.")
# -- physical trunk-media indicators for the Ring In / Ring Out ports -------
fibTmuRIConnection = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("copper", 1), ("fiber", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuRIConnection.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuRIConnection.setDescription('GET: Read what kind of trank is connected to Ring In port. At board type 9232/nn has no value.')
fibTmuROConnection = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("copper", 1), ("fiber", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuROConnection.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuROConnection.setDescription('GET: Read what kind of trank is connected to Ring Out port. At board type 9232/nn has no value.')
# Fixed-length (exactly 23 characters) DisplayString holding the device clock.
fibTmuRealTimeClock = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 1, 22), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(23, 23)).setFixedLength(23)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuRealTimeClock.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuRealTimeClock.setDescription('Get: Read real time with format hh:mm:ss DDD dd/MMM/yy. Set: Recive Display string of format hh:mm:ss DDD dd/MMM/yy.')
# ---------------------------------------------------------------------------
# tmuIntrfc group (1.3.6.1.4.1.22.60.2): per-interface statistics table.
# One row per IP-handling interface, indexed by fibTmuIntrfcIndex (which the
# MIB text states equals the standard ifIndex for the same interface).
# All columns are read-only; most are cyclic Counter32 objects.  Per the
# descriptions, many columns apply only to Token Ring interfaces and a few
# (parity/frame/noise/break error counters) only to the serial interface.
# ---------------------------------------------------------------------------
fibTmuIntrfcTable = MibTable((1, 3, 6, 1, 4, 1, 22, 60, 2, 1), )
if mibBuilder.loadTexts: fibTmuIntrfcTable.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuIntrfcTable.setDescription('Table containing information and control items concerning a specific IP handling interface.')
fibTmuIntrfcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1), ).setIndexNames((0, "TMU-MIB", "fibTmuIntrfcIndex"))
if mibBuilder.loadTexts: fibTmuIntrfcEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuIntrfcEntry.setDescription('Entry of information and control items concerning a specific IP handling interface.')
# Column 1: table index (same value as ifIndex for this interface).
fibTmuIntrfcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuIntrfcIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuIntrfcIndex.setDescription('The value of this object identifies the IP interface for which this entry contains management information. The value of this object for a particular interface has the same value as the ifIndex object defined for the same interface.')
# -- columns 2..16: receive-path counters (frame dispositions) --------------
fibTmuNumRarpRcvd = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRarpRcvd.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRarpRcvd.setDescription('Cyclic counter. Contains the number of rarp response messages received via this interface. Meaningful for Token Ring interfaces only.')
fibTmuNumRxRjctMem = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRxRjctMem.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRxRjctMem.setDescription('Cyclic counter. Contains the number of non-mac messages which were received by the tr chip but rejected by the higher sw levels because there was no memory.')
fibTmuNumRxAccepted = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRxAccepted.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRxAccepted.setDescription('Cyclic counter. Contains the number of non-mac frames received by higher level sw.')
fibTmuNumRxBdcst = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRxBdcst.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRxBdcst.setDescription('Cyclic counter. Contains the number of non-mac broadcast frames received by higher level SW. Meaningful for Token Ring interfaces only.')
fibTmuNumRxSpecific = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRxSpecific.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRxSpecific.setDescription('Cyclic counter. Contains the number of non-mac specific [non-broadcast, non-functional, non-group] frames received by higher level SW. Meaningful for Token Ring interfaces only.')
fibTmuNumRifIncluded = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRifIncluded.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRifIncluded.setDescription('Cyclic counter. Contains the number of non-mac frames with routing info field received by higher level SW. Meaningful for Token Ring interfaces only.')
fibTmuNumNoRif = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumNoRif.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumNoRif.setDescription('Cyclic counter. Contains the number of non-mac frames without routing info field received by higher level SW. Meaningful for Token Ring interfaces only.')
fibTmuNumNonSnap = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumNonSnap.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumNonSnap.setDescription('Cyclic counter. Contains the number of non-mac frames which were not snap messages and which were handed over to higher level SW by the mac level SW. These messages are rejected. Meaningful for Token Ring interfaces only.')
fibTmuNumUnknownType = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumUnknownType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumUnknownType.setDescription("Cyclic counter. Contains the number of non-mac frames which had an unknown 'TYPE' field in the snap header and which were handed over to higher level SW by the mac level SW. These messages are rejected. Meaningful for Token Ring interfaces only.")
fibTmuNumRifLong = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRifLong.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRifLong.setDescription('Cyclic counter. Contains the number of non-mac frames which had a routing information field [rif] which was longer than the UDPIP package can take [for learning]. Meaningful for Token Ring interfaces only.')
fibTmuNumRrpRjctIp = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRrpRjctIp.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRrpRjctIp.setDescription('Cyclic counter. Contains the number of rarp messages which were rejected because the message contents was illegal [e.g. illegal ip address]. Meaningful for Token Ring interfaces only.')
fibTmuNumArpRcvd = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumArpRcvd.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumArpRcvd.setDescription('Cyclic counter. Contains the number of arp response messages received via this interface. Meaningful for Token Ring interfaces only.')
fibTmuNumIpRcvd = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumIpRcvd.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumIpRcvd.setDescription('Cyclic counter. Contains the number of ip response messages received via this interface.')
fibTmuNumIfDown = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumIfDown.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumIfDown.setDescription("Cyclic counter. Number of times a message was directed to an interface which was 'down'.")
fibTmuNumOwnBrdcst = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumOwnBrdcst.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumOwnBrdcst.setDescription("Cyclic counter. Contains the number of broadcast frames which were received in one of the system's macs and which were very same messages sent by one of the system's macs. The mac hw receives broadcast messages even if they were sent by the mac's own 48 bits address. These messages are rejected. Meaningful for Token Ring interfaces only.")
# -- columns 17..20: counters of frames by received A/C bit combination -----
fibTmuAc00Cntr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuAc00Cntr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuAc00Cntr.setDescription("Cyclic counter. Contains the number of frames which were received with the 'A' and 'C' bits both '0'. Meaningful for Token Ring interfaces only.")
fibTmuAc01Cntr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuAc01Cntr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuAc01Cntr.setDescription("Cyclic counter. contains the number of frames which were received with the 'A' and 'C' bits equal to '0' and '1'. Meaningful for Token Ring interfaces only.")
fibTmuAc10Cntr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuAc10Cntr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuAc10Cntr.setDescription("Cyclic counter. Contains the number of frames which were received with the 'A' and 'C' bits equal to '1' and '0'. Meaningful for Token Ring interfaces only.")
fibTmuAc11Cntr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuAc11Cntr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuAc11Cntr.setDescription("Cyclic counter. Contains the number of frames which were received with the 'A' and 'C' bits equal to '1' and '1'.")
# -- columns 21..24: SCC (serial line) error counters -----------------------
fibTmuParityEc = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuParityEc.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuParityEc.setDescription('Cyclic counter. Contains the number of times scc reported parity error [per character]. Meaningful for serial intefaces only.')
fibTmuFrameEc = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFrameEc.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFrameEc.setDescription('Cyclic counter. Contains the number of times scc reported framing error [per character]. Meaningful for serial intefaces only.')
fibTmuRxNoiseEc = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuRxNoiseEc.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuRxNoiseEc.setDescription('Cyclic counter. Contains the number of times scc reported noise error [per character]. Meaningful for serial intefaces only.')
fibTmuBreakEc = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuBreakEc.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuBreakEc.setDescription('Cyclic counter. Contains the number of times scc reported break character was received. Meaningful for serial intefaces only.')
# -- columns 25..27: token ring driver get-naun/get-config error counters ---
fibTmuNumConfigNotValid = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumConfigNotValid.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumConfigNotValid.setDescription("Cyclic counter. Contains the number of times get-naun or get-config of tr driver reported 'MAC-ERR-CONFIG-NOT-VALID'. Meaningful for Token Ring interfaces only.")
fibTmuNumAddrNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumAddrNotFound.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumAddrNotFound.setDescription("Cyclic counter. Contains the number of times get-naun or get-config of tr driver reported 'MAC-ERR-ADDR-NOT-FOUND'. Meaningful for Token Ring interfaces only.")
fibTmuNumProcessDisabled = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumProcessDisabled.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumProcessDisabled.setDescription("Cyclic counter. Contains the number of times get-naun or get-config of tr driver reported 'MAC-ERR-PROCESS-DISABLED'. Meaningful for Token Ring interfaces only.")
# -- columns 28..32: beacon bookkeeping (count plus details of last beacon) -
fibTmuNumBeaconsRcvd = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumBeaconsRcvd.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumBeaconsRcvd.setDescription(' Cyclic counter. Contains the number of times tr driver reported receiving a beacon on this interface. Meaningful for Token Ring interfaces only.')
fibTmuLastBeaconTime = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 29), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuLastBeaconTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuLastBeaconTime.setDescription('Time [in sys timer units] when last beacon was reported on this intrerface. Meaningful for Token Ring interfaces only.')
fibTmuLastBeaconType = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 30), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuLastBeaconType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuLastBeaconType.setDescription('Type of the last beacon reported on this intrerface. Meaningful for Token Ring interfaces only.')
fibTmuLastBeaconAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 31), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuLastBeaconAddr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuLastBeaconAddr.setDescription("Mac address of the last station which reported 'beacon' on this interface. Meaningful for Token Ring interfaces only.")
fibTmuLastBeaconNaun = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 32), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuLastBeaconNaun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuLastBeaconNaun.setDescription("Mac address of the next upsream neighbour of the last station which reported 'beacon' on this interface. Meaningful for Token Ring interfaces only.")
# -- columns 33..34: group / functional address frame counters --------------
fibTmuNumRxGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRxGroup.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRxGroup.setDescription('Cyclic counter. Contains the number of non-mac group frames received by higher level SW. Meaningful for Token Ring interfaces only.')
fibTmuNumRxFunctional = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 2, 1, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumRxFunctional.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumRxFunctional.setDescription('Cyclic counter. Contains the number of non-mac functional frames received by higher level SW. Meaningful for Token Ring interfaces only.')
# --- tmuMatch group scalars (OID 1.3.6.1.4.1.22.60.3.x) ----------------------
# Read-only scalars describing the station/port "match" snapshot: when it was
# built, how many entries it holds, where the Tmu chipsets and the active
# monitor sit in it, whether the snapshot is valid / up to date, and - when it
# is stale - an enumerated reason code explaining why no match list exists.
fibTmuMatchUpdateTime = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 1), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchUpdateTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchUpdateTime.setDescription('Time [in sys timer units] from startup at which the table below has been updated. See remark in fibTmuMatchListValid below.')
fibTmuMatchNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchNumEntries.setDescription('Number of meaningful entries in the match array. The index of the last meaningful entry is num-entries. See remark in fibTmuMatchListValid below.')
fibTmuMatchNumTmuEntries = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchNumTmuEntries.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchNumTmuEntries.setDescription('Number of meaningful entries in the match array which belong to stations attached to the tmu via tau ports. This does not include the two (or one) chipsets themselves. Example: if second chip index is 3 and first chip index is 1 then the number of cau entries is 1. See remark in fibTmuMatchListValid below.')
# Index scalars (4-7): positions of notable stations within the match table;
# ordering of the table is downstream (token direction).
fibTmuMatchFirstChipIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchFirstChipIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchFirstChipIndex.setDescription('The index in the match table at which the the first chipset is located (order of stations on match table is downstream, i.e. in the direction messages are transported). See remark in fibTmuMatchListValid below.')
fibTmuMatchSecondChipIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchSecondChipIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchSecondChipIndex.setDescription('The index in the match table at which the the second chipset is located (order of stations on match table is downstream, i.e. in the direction messages are transported). See remark in fibTmuMatchListValid below.')
fibTmuMatchActMonIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchActMonIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchActMonIndex.setDescription('The index in the match table at which the active monitor is located (order of stations on match table is downstream, i.e. in the direction messages are transported). See remark in fibTmuMatchListValid below.')
fibTmuMatchConfigChipIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchConfigChipIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchConfigChipIndex.setDescription('The index in the match table from which the information for the configuration was collected (order of stations on match table is downstream, i.e. in the direction messages are transported). See remark in fibTmuMatchListValid below.')
# Validity flags (8-9).  Note the generated enums: ListValid uses 2/-1
# (valid / not-valid) while UpToDate uses 2/1 (up-to-date / not-up-to-date).
fibTmuMatchListValid = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, -1))).clone(namedValues=NamedValues(("valid", 2), ("not-valid", -1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchListValid.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchListValid.setDescription("Flag indicating whether the information in the tmuMatch group is valid or not. If information is not valid, ALL objects will return with a 'no such object' response.")
fibTmuMatchUpToDate = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 1))).clone(namedValues=NamedValues(("up-to-date", 2), ("not-up-to-date", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchUpToDate.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchUpToDate.setDescription("Flag. If set '1' then the information in All objects of tmuMatch group is not up to date any more: Some changes have occurred and a new, up to date matching configuration can not be supplied. In that case, the information in tmuMatch group remains true for the update time as specified above but not for the current time. See remark in fibTmuMatchListValid above.")
# Reason code (10): 14-value enumeration explaining why no match list could
# be built; each value is documented in the setDescription text below.
fibTmuMatchNoMatchReason = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 3, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("empty-reason", 1), ("malloc-fail", 2), ("segment-unstable", 3), ("no-chip-on-list", 4), ("inconsistency", 5), ("soft-01", 6), ("soft-02", 7), ("first-not-found", 8), ("second-not-found", 9), ("found-twice", 10), ("soft-03", 11), ("ports-gt-stations", 12), ("stations-gt-ports", 13), ("no-tr-list", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchNoMatchReason.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchNoMatchReason.setDescription("Indication meaningful only if fibTmuMatchUpToDate above is not up-to-date. empty-reason - Either there is a list or the reason for the fact that there is no list is yet unknown. malloc-fail - System could not provide memory for this list. segment-unstable - Indications from the token ring chip sets were that the ring has not reached a stable state yet. no-chip-on-list - Chip set handling software could not indicate which of the Tmu chips is on the portion of token ring which is on the Tau ports. inconsistency - Inconsistent responses from low level token ring handler indicate ring is not yet stable. soft-01 - Software error. system could not retrieve a mac address related to a specified tr interface. soft-02 - Software error. system could not retrieve a mac address related to a specified tr interface. first-not-found - First chip set of Tmu could not be found on the list of addresses supplied by the token ring driver. Ring is not stable yet. second-not-found - Second chip set of Tmu could not be found on the list of addresses supplied by the token ring driver. Ring is not stable yet. found-twice - One chip set of Tmu was found twice on the list of addresses supplied by the token ring driver. Ring is not stable yet. soft-03 - Software indicated a match could be found but could not supply the actula list. Internal sw error. ports-gt-stations - Number of ports was fould to be higher than the number of stations. This is only reason for not construction the 'match' table. stations-gt-ports - Number of stations was fould to be higher than the number of ports. This is only reason for not construction the 'match' table. no-tr-list - No configuration is currently available from the token ring driver. Ring is unstable or some sw error. See remark in fibTmuMatchListValid above.")
# --- tmuMatch table (OID 1.3.6.1.4.1.22.60.3.20) -----------------------------
# Table mapping each ring station (by MAC address) to the Tmu/Tau port it is
# attached to.  Row structure: conceptual row fibTmuMatchEntry indexed by
# fibTmuMatchIndex, with columns 1-5 defined below (all read-only).
fibTmuMatchTable = MibTable((1, 3, 6, 1, 4, 1, 22, 60, 3, 20), )
if mibBuilder.loadTexts: fibTmuMatchTable.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchTable.setDescription('Table containing information on the location of stations (identified by their physical address in terms of Tmu/Tau ports).')
fibTmuMatchEntry = MibTableRow((1, 3, 6, 1, 4, 1, 22, 60, 3, 20, 1), ).setIndexNames((0, "TMU-MIB", "fibTmuMatchIndex"))
if mibBuilder.loadTexts: fibTmuMatchEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchEntry.setDescription('Entry of information concerning a single station and its port (on Tmu/Tau).')
fibTmuMatchIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 3, 20, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchIndex.setDescription('The value of this object identifies the the number of the entry in the table. See remark in fibTmuMatchListValid above')
# Columns 2-3: Tmu port number and (optional) Tau port number; per the
# description texts, -1 (0xFFFFFFFF) is used when the station is on the trunk.
fibTmuMatchTauId = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 3, 20, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchTauId.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchTauId.setDescription('Number of the tau port on which station is attached. Tau ports are connectors on the tmu. The may have either a tau connected or a single station. See fibTmuMatchPortId below. If this object is not valid for the specified instance (station is not on the Tmu but on the trunk), return value is 0xFFFFFFFF (-1). See remark in fibTmuMatchListValid above')
fibTmuMatchPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 3, 20, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchPortId.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchPortId.setDescription("If greater than 1 then 'fibTmuMatchTauId' is the number of the port on the tmu and 'fibTmuMatchPortId' is the number of the port on the related Tau (starting with port #1 for fibTmuMatchPortId = 2). If less than or equal to 1 then it is a One-Station-Port Indicating that one station is directly attached to the port specified by 'fibTmuMatchTauId'. The information contained in values smaller than 2 may also be deduced from 'fibTmuMatchStationInfow' below. If this object is not valid for the specified instance (station is not on the Tmu but on the trunk), return value is 0xFFFFFFFF (-1). See remark in fibTmuMatchListValid above")
fibTmuMatchPhysAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 3, 20, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchPhysAddr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchPhysAddr.setDescription('The token ring physical address of the station which is attached to the specified port. See remark in fibTmuMatchListValid above')
# Column 5: bit map of station attributes (sniffer / doubtful / on-trunk /
# tmu-chip / attached-to-tmu) as spelled out in the description string.
fibTmuMatchStationInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 3, 20, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchStationInfo.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchStationInfo.setDescription("Information on the attached station: If BIT0 is set (value = 0x01) then this is definitely a 'sniffer' port (i.e. a station which is sending 'trace tool present' messages is located on this port). If BIT1 is set (value = 0x02) then its location is doubtful (System has been unable to resolve the location of this station). If BIT2 is set (value = 0x04) then station is not attached to one of the tmu's ports but rather to the trunk. If bit3 is set (value = 0x08) then station is one of the tmu's chips. If BIT4 is set (value = 0x10) then this is a station attached to the tmu (and not on the trunk). See remark in fibTmuMatchListValid above")
# --- tmuStations group scalars (OID 1.3.6.1.4.1.22.60.4.x) -------------------
# Structural mirror of the tmuMatch group above, but for the plain station
# list (ring-order list of MACs) rather than the station-to-port match:
# update time, entry counts, chipset / active-monitor indices, validity and
# up-to-date flags, and a no-list reason code.
fibTmuStationsUpdateTime = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 1), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsUpdateTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsUpdateTime.setDescription('Time [in sys timer units] from startup at which the table below has been updated. See remark in fibTmuStationsListValid below.')
fibTmuStationsNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsNumEntries.setDescription('Number of meaningful entries in the Stations array. The index of the last meaningful entry is num-entries. See remark in fibTmuStationsListValid below.')
fibTmuStationsNumTmuEntries = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsNumTmuEntries.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsNumTmuEntries.setDescription('Number of meaningful entries in the Stations array which belong to stations attached to the tmu via tau ports. This does not include the two (or one) chipsets themselves. Example: if second chip index is 3 and first chip index is 1 then the number of Tmu entries is 1. See remark in fibTmuStationsListValid below.')
# Index scalars (4-7), downstream (token-direction) ordering as in tmuMatch.
fibTmuStationsFirstChipIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsFirstChipIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsFirstChipIndex.setDescription('The index in the Stations table at which the the first chipset is located (order of stations on Stations table is downstream, i.e. in the direction messages are transported). See remark in fibTmuStationsListValid below.')
fibTmuStationsSecondChipIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsSecondChipIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsSecondChipIndex.setDescription('The index in the Stations table at which the the second chipset is located (order of stations on Stations table is downstream, i.e. in the direction messages are transported). See remark in fibTmuStationsListValid below.')
fibTmuStationsActMonIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsActMonIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsActMonIndex.setDescription('The index in the Stations table at which the active monitor is located (order of stations on Stations table is downstream, i.e. in the direction messages are transported). See remark in fibTmuStationsListValid below.')
fibTmuStationsConfigChipIndex = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsConfigChipIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsConfigChipIndex.setDescription('The index in the Stations table from which the information for the configuration was collected (order of stations on Stations table is downstream, i.e. in the direction messages are transported). See remark in fibTmuStationsListValid below.')
# Validity flags (8-9), same enum encodings as the tmuMatch group.
fibTmuStationsStationsListValid = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, -1))).clone(namedValues=NamedValues(("valid", 2), ("not-valid", -1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsStationsListValid.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsStationsListValid.setDescription("Flag indicating whether the information in the tmuStations group is valid or not. If information is not valid, ALL objects will return with a 'no such object' response.")
fibTmuStationsUpToDate = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 1))).clone(namedValues=NamedValues(("up-to-date", 2), ("not-up-to-date", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsUpToDate.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsUpToDate.setDescription("Flag. If set '1' then the information in All objects of tmuStations group is not up to date any more: Some changes have occurred and a new, up to date Stationsing configuration can not be supplied. In that case, the information in tmuStations group remains true for the update time as specified above but not for the current time. See remark in fibTmuStationsListValid above.")
# Reason code (10): 11-value subset of the tmuMatch reason enumeration (the
# soft-03 / ports-gt-stations / stations-gt-ports values do not apply here).
fibTmuMatchNoStationsListReason = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 4, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 14))).clone(namedValues=NamedValues(("empty-reason", 1), ("malloc-fail", 2), ("segment-unstable", 3), ("no-chip-on-list", 4), ("inconsistency", 5), ("soft-01", 6), ("soft-02", 7), ("first-not-found", 8), ("second-not-found", 9), ("found-twice", 10), ("no-tr-list", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuMatchNoStationsListReason.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuMatchNoStationsListReason.setDescription('Indication meaningful only if fibTmuStationsUpToDate above is not up-to-date. empty-reason - Either there is a list or the reason for the fact that there is no list is yet unknown. malloc-fail - System could not provide memory for this list. segment-unstable - Indications from the token ring chip sets were that the ring has not reached a stable state yet. no-chip-on-list - Chip set handling software could not indicate which of the Tmu chips is on the portion of token ring which is on the Tau ports. inconsistency - Inconsistent responses from low level token ring handler indicate ring is not yet stable. soft-01 - Software error. system could not retrieve a mac address related to a specified tr interface. soft-02 - Software error. system could not retrieve a mac address related to a specified tr interface. first-not-found - First chip set of Tmu could not be found on the list of addresses supplied by the token ring driver. Ring is not stable yet. second-not-found - Second chip set of Tmu could not be found on the list of addresses supplied by the token ring driver. Ring is not stable yet. found-twice - One chip set of Tmu was found twice on the list of addresses supplied by the token ring driver. Ring is not stable yet. no-tr-list - No configuration is currently available from the token ring driver. Ring is unstable or some sw error. See remark in fibTmuStationsListValid above.')
# --- tmuStations table (OID 1.3.6.1.4.1.22.60.4.20) --------------------------
# Ring-order list of stations by MAC address.  Row fibTmuStationsEntry is
# indexed by fibTmuStationsIndex; columns: index, MAC address, and the same
# station-info bit map used by the tmuMatch table.
fibTmuStationsTable = MibTable((1, 3, 6, 1, 4, 1, 22, 60, 4, 20), )
if mibBuilder.loadTexts: fibTmuStationsTable.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsTable.setDescription('Table containing information on the location of stations (identified by their physical address).')
fibTmuStationsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 22, 60, 4, 20, 1), ).setIndexNames((0, "TMU-MIB", "fibTmuStationsIndex"))
if mibBuilder.loadTexts: fibTmuStationsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsEntry.setDescription('Entry of information concerning a single station.')
fibTmuStationsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 4, 20, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsIndex.setDescription('The value of this object identifies the the number of the entry in the table. See remark in fibTmuStationsListValid above')
fibTmuStationsPhysAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 4, 20, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsPhysAddr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsPhysAddr.setDescription('The token ring physical address of the station which is at the specified index. See remark in fibTmuStationsListValid above')
fibTmuStationsStationInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 4, 20, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuStationsStationInfo.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuStationsStationInfo.setDescription("Information on the attached station: If BIT0 is set (value = 0x01) then this is definitely a 'sniffer' port (i.e. a station which is sending 'trace tool present' messages is located on this port). If BIT1 is set (value = 0x02) then its location is doubtful (System has been unable to resolve the location of this station). If BIT2 is set (value = 0x04) then station is not attached to one of the tmu's ports but rather to the trunk. If bit3 is set (value = 0x08) then station is one of the tmu's chips. If BIT4 is set (value = 0x10) then this is a station attached to the tmu (and not on the trunk). See remark in fibTmuStationsListValid above")
# --- tmuPorts group scalars (OID 1.3.6.1.4.1.22.60.5.x) ----------------------
# Read-only scalars summarising physical port state and capacity limits:
# relay/phantom counts (1-2), ports-per-Tmu / Taus-per-Tmu / ports-per-Tau
# dimensions (3-5), and error/timeout thresholds (6-7) that govern when a
# port's relay is closed or a Tau connection is declared faulty.
fibTmuPortsNumRelayOpen = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumRelayOpen.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumRelayOpen.setDescription('Number of ports attached to single stations either directly on the tmu or on a tau which have their relay open. A relay remains open as long as there is an attached station and it not a source of some problem.')
fibTmuPortsNumPhantomPresent = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumPhantomPresent.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumPhantomPresent.setDescription('Number of ports attached to single stations (either directly on the tmu or on a tau) which have their phantom up.')
fibTmuPortsNumPortsPerTmu = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumPortsPerTmu.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumPortsPerTmu.setDescription('Number of ports on one tmu (both ports which support a tau and ports which may support only single stations).')
fibTmuPortsNumTausPerTmu = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumTausPerTmu.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumTausPerTmu.setDescription("The maximal number of Tau's which can be connected to one Tmu")
fibTmuPortsNumPortsPerTau = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumPortsPerTau.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumPortsPerTau.setDescription('The maximal number of ports on one Tau')
fibTmuPortsMaxNumTauErrs = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsMaxNumTauErrs.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsMaxNumTauErrs.setDescription('Number of errors (which cause temporary relay closing) allowed after phantom goes up on a single Tau port and before its relay is closed (port detached). Relay is reopened (port attached) after phantom goes down and up again.')
fibTmuPortsMaxFirstTimeout = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 5, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsMaxFirstTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsMaxFirstTimeout.setDescription('Number of timeouts allowed before it is decided that communication with Tau is really faulty. System moves from state of connection into no-connection and attempts will be made to determine the type of connection (Tau or single).')
# --- tmuPortsTau table (OID 1.3.6.1.4.1.22.60.5.29) --------------------------
# One row per physical Tmu port (row fibTmuPortsTauEntry, indexed by
# fibTmuPortsTmuPort): port type and connection state, status bit map,
# manager open/close controls (the only read-write columns in this chunk),
# error/timeout counters, and details reported by an attached Tau.
# NOTE(review): column sub-identifiers jump from 6 to 10 - presumably the
# source MIB reserves 7-9; kept as generated.
fibTmuPortsTauTable = MibTable((1, 3, 6, 1, 4, 1, 22, 60, 5, 29), )
if mibBuilder.loadTexts: fibTmuPortsTauTable.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauTable.setDescription("Table containing information on ports which are directly on Tmu and on Tau's which may be connected to them.")
fibTmuPortsTauEntry = MibTableRow((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1), ).setIndexNames((0, "TMU-MIB", "fibTmuPortsTmuPort"))
if mibBuilder.loadTexts: fibTmuPortsTauEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauEntry.setDescription('Entry of information concerning a single port on Tmu and the attached station/Tau.')
fibTmuPortsTmuPort = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsTmuPort.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTmuPort.setDescription('Number of port (on TMU) described in this array entry. Port numbers increase in the direction of the token.')
fibTmuPortsPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("station-or-Tau", 1), ("station-only", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsPortType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsPortType.setDescription('Flag indicating whether this port supports both a Tau and a single station or just a single station.')
fibTmuPortsEndConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("undefined", 1), ("tau-connected", 2), ("single-connected", 3), ("passive-connected", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsEndConnection.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsEndConnection.setDescription('Flag indicating whether this port is connected to a Tau or to a single station (or is in a state where it is not clear exactly what is connected to the specified port).')
# Column 4: port-status bit map; bit meanings are enumerated in the text.
fibTmuPortsPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsPortStatus.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsPortStatus.setDescription('Bit map indicating status of the port: Bit3 (0x08) - If set indicates that the relay of this port is open (and, therefore, the station/Tau connected to this port is on the token ring). Bit4 (0x10) - If set indicates that phantom signal was detected at this port (and, therefore, a station/Tau is connected to this port). Bit5 (0x20) - If set indicates that the relay of this port is closed because a command was received to do so from an snmp manager. Bit6 (0x40) - If set then port has been temporarily closed by Tmu because it was decided that either the Tau or the communication line is faulty. System may still revert to attaching the port if faulty state is overcome. This state may also be entered because Tmu received indication that the ring was not stable. As long as this is the only reason for the port to be closed, system will continually try to revert to attaching the port. Bit7 (0x80) - If set then port has been closed because it was found to create too many errors. Port will remain closed until phantom goes down and up again. Bit9 (0x200) - Port is detached because of security violation and will remain so until phantom goes down and up again or until manager releases the lock.')
# Columns 5-6: manager-controlled open/close, read-write; "Run" takes effect
# until next reset, "Perm" takes effect after the next reset.
fibTmuPortsManagerCloseRun = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("open", 1), ("close", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsManagerCloseRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsManagerCloseRun.setDescription("When setting this object to the value of close (2) the indicated port on Tmu will be closed, the related led will start blinking at a slow rate and port will not be opened before it is set to the value of open (1). Setting to the value of open (1) does not guarantee the opening of the indicated Tmu port but will only allow the Tmu system to let this port into the ring if it obeys the rules of token ring. Reading this object returns the value last set by the manager (or the default value). This command holds only until the next system reset. To make sure it remains valid after reset, use 'fibTmuPortsManagerClosePerm'")
fibTmuPortsManagerClosePerm = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("open", 1), ("close", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsManagerClosePerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsManagerClosePerm.setDescription("When setting this object to the value of close (2) the indicated port on Tmu will be closed after the next system reset. see 'fibTmuPortsManagerCloseRun'. Setting to the value of open (1) will only allow the Tmu system to let this port into the ring (after the next reset) if it obeys the rules of token ring. Reading this object returns the value last set by the manager (or the default value).")
# Columns 10-19: per-port error/timeout counters and attached-Tau details.
fibTmuPortsNumConsequentErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumConsequentErrs.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumConsequentErrs.setDescription("Counter. Number of times an error has been detected on this Tau port (one after the other, without a 'no error' state in between) since phantom last came up. When the number of errors reaches fibTmuPortsMaxNumTauErrs, relay is closed and is not opened until phantom goes down and up again.")
fibTmuPortsNumTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumTimeout.setDescription('Number of times communication with Tau has been attempted immediately after pahntom has gone up. Attempt go on for MaxFirstTimeout times. After that, it is decided that a regular station is connected and this will remain the state until phantom goes down and up again.')
fibTmuPortsTauPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("no-connection", 1), ("waiting-to-open", 2), ("active-connection", 3), ("getting-config", 4), ("initializing", 5), ("pre-active", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsTauPortState.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauPortState.setDescription('This is a status byte. it indicates the state of the port as far as software is concerned. A port which may only carry a single station can possibly be in one of two states: no-connection (1) waiting-to-open (2) active-connection (3) A port which may carry either a Tau or a single station can possibly be in one of the following states: no-connection (1) getting-config (4) initializing (5) pre-active (6) active-connection (3)')
fibTmuPortsNumPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumPorts.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumPorts.setDescription("The number of ports on the Tau connected to this port. Set to '1' when a station is directly connected to this port. Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'")
fibTmuPortsNumAttached = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumAttached.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumAttached.setDescription("This is the number of ports on the connected Tau (if there is any) that have their relay open (station is attached to the ring). Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'")
fibTmuPortsNumPhantomUp = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumPhantomUp.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumPhantomUp.setDescription("This is the number of ports on the connected Tau (if there is any) that have their phantom in 'up' state. Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'")
fibTmuPortsTauRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsTauRevision.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauRevision.setDescription("This is the Tau revision as reported by the Tau itself (if there is one). Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'.")
fibTmuPortsModuleId = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsModuleId.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsModuleId.setDescription("This is a hw id attached to each of its modules by the Tau. each octet is related to one module (up to 4 modules). Number of meaningful octets is determined by fibTmuPortsNumModules below. Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'")
fibTmuPortsNumModules = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsNumModules.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsNumModules.setDescription("This is the number of modules on the attached Tau as reported by the Tau itself. Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'.")
# fibTmuPortsTauMode's setDescription continues beyond this chunk.
fibTmuPortsTauMode = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsTauMode.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauMode.setDescription("This is a bit map of information on the status of the Tau and the way it is set. Details of the various bits: Bit0 (= 0x01) If '1' then Tau is set to a rate of 16 mega bits per second on token ring. Otherwise, it is set to a rate of 4 mega bits per second. Only meaningful when 'fibTmuPortsTauPortState' is 'active-connection'.")
fibTmuPortsLedState = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("slow-rate", 1), ("fast-rate", 2), ("fixed-on", 3), ("fixed-off", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsLedState.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsLedState.setDescription('State of the indication led related to this port. Values can be: slow-rate (1) fast-rate (2) fixed-on (3) fixed-off (4)')
fibTmuPortsRequestType = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 17, 33, 49, 65, 81, 97, 113, 129, 145, 161))).clone(namedValues=NamedValues(("no-Tau-msg", 1), ("get-configuration-msg", 17), ("get-phantom-msg", 33), ("get-relay-msg", 49), ("attach-detach-msg", 65), ("set-beacon-msg", 81), ("set-active-msg", 97), ("get-all-msg", 113), ("get-changes-msg", 129), ("reset-Tau-msg", 145), ("self-test-Tau-msg", 161)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsRequestType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsRequestType.setDescription("This is the type of the message sent to the attached Tau for which a response is required. If request-type is 'no-Tau-msg' then no message was sent and, therefore, no request is expected. no-Tau-msg (=0x01) get-configuration-msg (=0x11) get-phantom-msg (=0x21) get-relay-msg (=0x31) attach-detach-msg (=0x41) set-beacon-msg (=0x51) set-active-msg (=0x61) get-all-msg (=0x71) get-changes-msg (=0x81) reset-Tau-msg (=0x91) self-test-Tau-msg (=0xA1)")
fibTmuPortsLastTxTime = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 22), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsLastTxTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsLastTxTime.setDescription('This is the system timer value at the last tx-to-Tau on this Tau port (provided a Tau is attached to it).')
fibTmuPortsLastPollTime = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 23), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsLastPollTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsLastPollTime.setDescription('This is the system timer value at the last poll response on this Tau port (provided a Tau is attached to it). If no polling has so far been carried out, a value of 0 is returned.')
fibTmuPortsUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsUpdateTime.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsUpdateTime.setDescription('This is the system timer value when phantom was last probed on this Tau port.')
fibTmuPortsAllowedAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 25), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsAllowedAddr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsAllowedAddr.setDescription('This is the one physical address allowed to be on this Tmu port (provided a single station is attached to it and not a Tau). The value loaded in fibTmuPortsAllowedAddrLoaded below determines the state of this object. Writing to this object stores the address in non-volatile memory. It also makes security on this port active provided fibTmuSecurityModeRun has bit0 set. Loading will not be carried out if fibTmuNumStationLeft is 0 (no more space in non-volatile memory). In that case, an error will be returned (genErr = 5).')
fibTmuPortsAllowedAddrLoaded = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 29, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("addr-not-loaded", 1), ("loaded-not-active", 2), ("loaded-and-active", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsAllowedAddrLoaded.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsAllowedAddrLoaded.setDescription("This is the status of fibTmuPortsAllowedAddr. When reading, the possible values are as follows: addr-not-loaded indicates the value in fibTmuPortsAllowedAddr is meaningless. No security address has been loaded for this port. loaded-not-active indicates the value in fibTmuPortsAllowedAddr is stored in non volatile memory but security action for this port is not active. This happens when address is loaded while security action for the Tmu system is disabled (fibTmuSecurityModeRun has bit0 reset). loaded-and-active indicates the value in fibTmuPortsAllowedAddr is loaded in non volatile memory and security is active on this port. This means that if a station is directly connected to this port then it must have the specified physical address or this port will automatically close. As for writing, this object can be written only if its value is either 'loaded-not-active' or 'loaded-and-active' and the only value allowed is addr-not-loaded. In that case, 'fibTmuPortsAllowedAddr' is erased from the non-volatile memory and 'fibTmuNumStationLeft' is incremented. If port has been closed because of security (fibTmuPortsPortStatus had bit9 set) then it will open (provided it has not been closed because of some other reason).")
# --- fibTmuPortsAll table (OID ...22.60.5.30): per-port information for both
# directly-attached stations and stations behind a Tau. Rows are indexed by
# (Tmu port, Tau port), auto-generated from TMU-MIB.
fibTmuPortsAllTable = MibTable((1, 3, 6, 1, 4, 1, 22, 60, 5, 30), )
if mibBuilder.loadTexts: fibTmuPortsAllTable.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsAllTable.setDescription('Table containing information on ports which are directly connected to stations and on Tau ports which may be connected to stations.')
fibTmuPortsAllEntry = MibTableRow((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1), ).setIndexNames((0, "TMU-MIB", "fibTmuPortsTmuPortIndex"), (0, "TMU-MIB", "fibTmuPortsTauPortIndex"))
if mibBuilder.loadTexts: fibTmuPortsAllEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsAllEntry.setDescription('Entry of information concerning a single port on Tmu or on Tau.')
# Row indices: port number on the Tmu and port number on the Tau.
fibTmuPortsTmuPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsTmuPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTmuPortIndex.setDescription('Number of port on Tmu. Port numbers increase in the direction of the token. range of valid values: 1->fibTmuPortsNumTausPerTmu')
fibTmuPortsTauPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsTauPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauPortIndex.setDescription('Number of port on Tau. Port numbers increase in the direction of the token. range of valid values: 1->fibTmuPortsNumTausPerTau')
# Port state bit map (attachment, phantom, detach reasons; see description).
fibTmuPortsPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsPortState.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsPortState.setDescription("Bit map describing the indicated port: Bit1 (0x02) - No such port. Index is out of bounds. Either a Tau is connected and the specified port number is larger than the number of ports on the Tau currently connected or a single station is directly attached and the specified port number is larger than 1. Bit2 (0x04) - It has not yet been determined what is connected to the specified Tmu port. It may later be found to be either a single station of a Tau. Bit3 (0x08) - Port is attached. This means that the token ring goes through the station related to this port. Bit4 (0x10) - Phantom signal is 'on' for this port. Bit5 (0x20) - Port is detached because of a request from manager. Bit6 (0x40) - Port is temporarily detached because of a temporary error or because it is in a transition state. Bit7 (0x80) - Port is detached because of an error and will remain so until phantom goes down and up again. Bit8 (0x100) - Port is a Tmu port attached to a single station. Bit9 (0x200) - Port is detached because of security violation and will remain so until phantom goes down and up again or until manager releases the lock.")
# Manager open/close controls for a Tau port: run-time (until reset) and
# permanent (takes effect after reset) variants.
fibTmuPortsGenCloseRun = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("open", 1), ("close", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsGenCloseRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsGenCloseRun.setDescription("When setting this object to the value of close (2) the indicated port on Tau will be closed, the related led will start blinking at a slow rate and port will not be opened before it is set to the value of open (1). Setting to the value of open (1) does not guarantee the opening of the indicated Tau port but will only allow the Tmu system to let this port into the ring if it obeys the rules of token ring. Reading this object returns the value last set by the manager (or the default value). This command holds only until the next system reset. To make sure it remains valid after reset, use 'fibTmuPortsGenClosePerm'. This command only affects ports on Tau. If there is no Tau attached when command is issued, then it is stored in memory and goes into effect when a Tau is attached. This command will have no effect of a station directly attached to the Tmu. To detach a directly attached station, use 'fibTmuPortsManagerCloseRun' (See above).")
fibTmuPortsGenClosePerm = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("open", 1), ("close", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsGenClosePerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsGenClosePerm.setDescription("When setting this object to the value of close (2) the indicated port on Tau is instructed to be closed after the next reset. Setting to the value of open (1) does not guarantee the opening of the indicated Tau port after the next reset but will only allow the Tmu system to let this port into the ring if it obeys the rules of token ring. Reading this object returns the value last set by the manager (or the default value). This command only affects ports on Tau. If there is no Tau attached at startup (after reset), then it is stored in memory and goes into effect when a Tau is attached. This command will have no effect of a station directly attached to the Tmu. To permanently detach a directly attached station, use 'fibTmuPortsManagerClosePerm' (See above).")
# Identification of the station behind this port: MAC address and a bit map
# of station attributes (sniffer/trunk/tmu-chip flags per the description).
fibTmuPortsPhysAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 6), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsPhysAddr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsPhysAddr.setDescription("The token ring physical address of the station which is attached to the specified port. If station is not identified (e.g. fibTmuMatchListValid, see above, is not 'TRUE') then a string of 0xFF characters is returned.")
fibTmuPortsStationInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuPortsStationInfo.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsStationInfo.setDescription("Information on the attached station: If BIT0 is set (value = 0x01) then this is definitely a 'sniffer' port (i.e. a station which is sending 'trace tool present' messages is located on this port). If BIT1 is set (value = 0x02) then its location is doubtful (System has been unable to resolve the location of this station). If BIT2 is set (value = 0x04) then station is not attached to one of the tmu's ports but rather to the trunk. If bit3 is set (value = 0x08) then station is one of the tmu's chips. If BIT4 is set (value = 0x10) then this is a station attached to the tmu (and not on the trunk). If station is not identified (e.g. fibTmuMatchListValid, see above, is not 'TRUE') then -1 (0xFFFFFFFF) is returned.")
# Security for Tau ports: allowed MAC per Tau port and its loaded/active
# status — parallels fibTmuPortsAllowedAddr(-Loaded) for directly-attached
# Tmu ports.
fibTmuPortsTauAllowedAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 8), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsTauAllowedAddr.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauAllowedAddr.setDescription("This is the one physical address allowed to be on this port on Tau. The value loaded in fibTmuPortsTauAllowedAddrLoaded below determines the state of this object. Writing to this object stores the address in non-volatile memory. It also makes security on this port active provided fibTmuSecurityModeRun has bit0 set. Loading will not be carried out if fibTmuNumStationLeft is 0 (no more space in non-volatile memory). In that case, an error will be returned (genErr = 5). This command only affects ports on Tau. If there is no Tau attached when command is issued, then it is stored in memory and goes into effect when a Tau is attached. This command will have no effect of a station directly attached to the Tmu. To affect such ports, use 'fibTmuPortsAllowedAddr'")
fibTmuPortsTauAllowedAddrLoaded = MibTableColumn((1, 3, 6, 1, 4, 1, 22, 60, 5, 30, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("addr-not-loaded", 1), ("loaded-not-active", 2), ("loaded-and-active", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuPortsTauAllowedAddrLoaded.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuPortsTauAllowedAddrLoaded.setDescription("This is the status of fibTmuPortsTauAllowedAddr. When reading, the possible values are as follows: addr-not-loaded indicates the value in fibTmuPortsTauAllowedAddr is meaningless. No security address has been loaded for this port. loaded-not-active indicates the value in fibTmuPortsTauAllowedAddr is stored in non volatile memory but security action for this port is not active. This happens when address is loaded while security action for the Tmu system is disabled (fibTmuSecurityModeRun has bit0 reset). loaded-and-active indicates the value in fibTmuPortsTauAllowedAddr is loaded in non volatile memory and security is active on this port. This means that if a station is directly connected to this port then it must have the specified physical address or this port will automatically close. As for writing, this object can be written only if its value is either 'loaded-not-active' or 'loaded-and-active' and the only value allowed is addr-not-loaded. In that case, 'fibTmuPortsTauAllowedAddr' is erased from the non-volatile memory and 'fibTmuNumStationLeft' is incremented. If port has been closed because of security (fibTmuPortsPortState had bit9 set) then it will open (provided it has not been closed because of some other reason). This command only affects ports on Tau. If there is no Tau attached when command is issued, then it is stored in memory and goes into effect when a Tau is attached. This command will have no effect of a station directly attached to the Tmu. To affect such ports, use 'fibTmuPortsAllowedAddrLoaded'")
# --- fibTmuProduction scalars (OID ...22.60.6): factory/hardware inventory
# of the Tmu system — chip-set MAC addresses, memory chip types and sizes,
# serial number, and board type. Auto-generated from TMU-MIB.
# MAC (48-bit) addresses of the three on-board chip sets.
fibTmuProductionAddr48No1 = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 1), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionAddr48No1.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionAddr48No1.setDescription('48 bit address of the chip set which is located at the primary in port.')
fibTmuProductionAddr48No2 = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 2), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionAddr48No2.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionAddr48No2.setDescription('48 bit address of the chip set which is located at the primary out port.')
fibTmuProductionAddr48No3 = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 3), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionAddr48No3.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionAddr48No3.setDescription('48 bit address of the chip set which is located at the secondary ring port.')
# Memory chip type enumerations (EPROM / RAM / FLASH / EEPROM).
fibTmuProductionRomType = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("rom-27c020", 2), ("rom-27c040", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionRomType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionRomType.setDescription('Type of EPROM used in the Tmu system.')
fibTmuProductionRamType = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unknown", 1), ("ram-128k8", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionRamType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionRamType.setDescription('Type of RAM used in the Tmu system.')
fibTmuProductionFlashType = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("unknown", 1), ("flash-28f010", 2), ("flash-28f020", 3), ("flash-29f010", 4), ("flash-29f040", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionFlashType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionFlashType.setDescription('Type of FLASH used in the Tmu system.')
fibTmuProductionEepromType = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("with-page-mode", 2), ("without-page-mode", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionEepromType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionEepromType.setDescription('Type of EEPROM used in the Tmu system.')
# Serial number (fixed 3-octet string).
fibTmuProductionSerialNum = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(3, 3)).setFixedLength(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionSerialNum.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionSerialNum.setDescription('Serial number of Tmu HW system.')
# Memory sizes; per the descriptions, units are 32K bytes (EEPROM: 0.5K),
# with low sentinel values meaning "unknown" or "chip not installed".
fibTmuProductionRamSize = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionRamSize.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionRamSize.setDescription("Size of RAM used in the Tmu system in units of 32K bytes. A value of '3' indicates size is not known.")
fibTmuProductionFlash0Size = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionFlash0Size.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionFlash0Size.setDescription("Size of FLASH memory bank #0 used in the Tmu system in units of 32K bytes. A value of '3' indicates size is not known.")
fibTmuProductionFlash1Size = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionFlash1Size.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionFlash1Size.setDescription("Size of FLASH memory bank #1 used in the Tmu system in units of 32K bytes. A value of '3' indicates size is not known. A value of '2' indicates chip is not installed.")
fibTmuProductionEepromSize = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionEepromSize.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionEepromSize.setDescription("Size of EEPROM memory used in the Tmu system in units of 0.5K bytes. A value of '3' indicates size is not known.")
# Opaque hardware info blob (fixed 4 octets) and board model enumeration.
fibTmuProductionHwInfo = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuProductionHwInfo.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionHwInfo.setDescription("General encoded information concerning Tmu system's hardware.")
fibTmuProductionBoardType = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 6, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("unknown", 1), ("board-9230", 2), ("board-9230-16", 3), ("board-9230-32", 4), ("board-9230-16F", 5), ("board-9230-32F", 6), ("board-9232-16", 7), ("board-9232-32", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuProductionBoardType.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuProductionBoardType.setDescription('Type of TMU Board.')
# --- fibTmuSecurity scalars (OID ...22.60.7): system-wide security mode
# (running vs persisted bit maps) and station-security capacity counters.
# Auto-generated from TMU-MIB.
# Effective security mode (bit map, read-only; loaded once after reset).
fibTmuSecurityModeRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 7, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuSecurityModeRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuSecurityModeRun.setDescription('The currently effective security mode. This is a bit map. This object is only loaded once after reset. To change it, fibTmuSecurityModeRun must be loaded and then system must be reset. BIT(0) - Station security. If set then some ports are marked as being allowed to have only specific stations (station per port). The list of port is specified below. REST - T.B.D.')
# Persisted security mode (read-write; takes effect at next reset).
fibTmuSecurityModePerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 7, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuSecurityModePerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuSecurityModePerm.setDescription('The value currently in non volatile memory of the security mode. This is a bit map. See detals in fibTmuSecurityModeRun above. This object is only loaded into fibTmuSecurityModeRun once after reset. Changing it effects the system only after reset.')
# Station-security capacity: maximum configurable ports, and slots left.
fibTmuNumStationSecurity = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 7, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumStationSecurity.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumStationSecurity.setDescription("Maximal number of ports for which the security mode 'station security' (see fibTmuSecurityModeRun above) can be specified.")
fibTmuNumStationLeft = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 7, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuNumStationLeft.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuNumStationLeft.setDescription("Number of ports for which the security mode 'station security' (see fibTmuSecurityModeRun above) can still be specified. This is the difference between 'fibTmuNumStationSecurity' and the number of port for which the allowed station has already been specified.")
fibTmuFunctBeacon2AutotestRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(160, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctBeacon2AutotestRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctBeacon2AutotestRun.setDescription("The time period for starting the 'autoremove' test after start of beaconing. In units of milliseconds. Minimal value: 160. maximal value 200. Factory default: 160. To change this object, load 'fibTmuFunctBeacon2AutotestPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctBeacon2OkRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(40, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctBeacon2OkRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctBeacon2OkRun.setDescription("The upper bound for time period starting at the removal of a problem till ring status indicates no beaconing. In units of milliseconds. Minimal value: 40. maximal value 200. Factory default: 50. To change this object, load 'fibTmuFunctBeacon2OkPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctOk2BeaconRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1100, 2500))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctOk2BeaconRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctOk2BeaconRun.setDescription("The upper bound for time period starting at the insertion of a problem till ring status indicates beaconing. In units of milliseconds. Minimal value: 1100. maximal value 2500. Factory default: 2000. To change this object, load 'fibTmuFunctOk2BeaconPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctWrapCwtRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctWrapCwtRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctWrapCwtRun.setDescription("Time delay to apply, after changing warp relay state. In units of milliseconds. Intended to let TMS get the real ring status. Minimal value: 0. maximal value 200. Factory default: 100. To change this object, load 'fibTmuFunctWrapCwtPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctWrapWnrRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("wnr-dont-release-cpu", 1), ("wnr-release-cpu", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctWrapWnrRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctWrapWnrRun.setDescription("Flag indicating whether to release CPU while waiting after warp change. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. To change this object, load 'fibTmuFunctWrapWnrPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctRingIstRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctRingIstRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRingIstRun.setDescription("Time to poll for consistent Tmu ring status before continuing recovery. In units of milliseconds. Minimal value: 0. maximal value 200. Factory default: 0 To change this object, load 'fibTmuFunctRingIstPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctRingIstnrRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("istnr-dont-release-cpu", 1), ("istnr-release-cpu", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctRingIstnrRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRingIstnrRun.setDescription("Flag indicating whether to release CPU while waiting for consistent Tmu ring status. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. To change this object, load 'fibTmuFunctRingIstnrPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
fibTmuFunctCheckAogTauRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-check-ring", 1), ("check-ring", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctCheckAogTauRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctCheckAogTauRun.setDescription("Flag is used during BRUTAL recovery (single problem). If TRUE (checck-ring) then check whether the ring is BEACONING after opening the good TAU's (This check takes fibTmuFunctOk2BeaconRun milli sec. and not needed in most cases). Values: 1 - FALSE, 2 - TRUE. Factory default: FALSE. To change this object, load 'fibTmuFunctCheckAogTauPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# --- TMU functionality "Run" parameters (OID 1.3.6.1.4.1.22.60.8.1.*) ---
# Auto-generated pysnmp MIB scalars exposing the *current* (runtime) values
# of the TMU recovery/insertion tuning parameters.  All are read-only: to
# change one, the operator writes the matching "...Perm" object (non-volatile
# RAM), and the new value takes effect only after reset.  Status/description
# texts are attached only when mibBuilder.loadTexts is enabled.
# Max number of stations allowed to insert into the ring per round (0 = all).
fibTmuFunctMaxNoiRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 300))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctMaxNoiRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctMaxNoiRun.setDescription("When several station wants to insert into the ring, this parameter sets the limit on the maximal number of stations which are allowed to insert (other stations are delayed until the next round). If 0 then enable all stations are allowed to insert. Minimal value: 0. maximal value 300. Factory default: 1 To change this object, load 'fibTmuFunctRingIstPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# If TRUE, look for problems among the most recently inserted stations first.
fibTmuFunctLinkPtifRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-link-to-last", 1), ("link-to-last", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctLinkPtifRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctLinkPtifRun.setDescription("When problem in TMU segment is discovered then if this flag is TRUE, first of all try to find problem between last inserted stations. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. To change this object, load 'fibTmuFunctLinkPtifPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Aging time (ms) of the insertion pool.
fibTmuFunctInsPatRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1500, 4000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctInsPatRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctInsPatRun.setDescription("Parameter for controlling the aging of the 'inseretion-pool' (Should be long enough to make sure that the station is not malfunctioning). Units: milli seconds. Minimal value: 1500. maximal value 4000. Factory default: 3000 To change this object, load 'fibTmuFunctInsPatPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether to run recovery when station count exceeds open-port count.
fibTmuFunctUseMismatchRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-use-mismatch", 1), ("use-mismatch", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctUseMismatchRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctUseMismatchRun.setDescription("Indication on whether to apply recovery when the number of stations is larger than the number of open ports. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. To change this object, load 'fibTmuFunctUseMismatchPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether to check ring status for problems caused by inserting stations.
fibTmuFunctChkRingInsRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-check-ring", 1), ("check-ring", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctChkRingInsRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctChkRingInsRun.setDescription("Indication on whether to check ring status (for possible problems caused by the currently inserted stations) or not. Setting 'TRUE' results in quicker discovery of stations with invalid frequency but inflicts higher overhead on legitimate frequency stations. Values: 1 - FALSE, 2 - TRUE. Factory default: FALSE. To change this object, load 'fibTmuFunctChkRingInsPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Time (ms) after insertion before a station is considered o.k.
fibTmuFunctChkRingPerRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1500, 4000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctChkRingPerRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctChkRingPerRun.setDescription("Parameter indicating the elapsed time after letting a station into the ring after which a station is considered to be o.k. See 'fibTmuFunctChkRingInsRun' Units: milli seconds. Minimal value: 1500. maximal value 4000. Factory default: 3000 To change this object, load 'fibTmuFunctChkRingPerPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Delay (ms) between problem detection and start of 'claim' transmission.
fibTmuFunctClaimTimeOutRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(150, 500))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctClaimTimeOutRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctClaimTimeOutRun.setDescription("Parameter indicating the elapsed time between the detection of a problem and the beginning of sending 'claim'. Units: milli seconds. Minimal value: 150. maximal value 500. Factory default: 250 To change this object, load 'fibTmuFunctClaimTimeOutPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether to perform a cross-check before disabling a port.
fibTmuFunctAnotherCheckRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no-another-check", 1), ("another-check", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctAnotherCheckRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctAnotherCheckRun.setDescription("Indication on whether to perform another check before disabling a port as a cross check. Values: 1 - FALSE, 2 - TRUE. Factory default: FALSE. To change this object, load 'fibTmuFunctAnotherCheckPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Which TMS to leave out of the ring on 'warp out' (1 = OUT, 2 = SECONDARY).
fibTmuFunctTmsOnOutRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctTmsOnOutRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctTmsOnOutRun.setDescription("Parameter indicating the tms to leave out of the ring when carrying out 'warp out': It could be TMS no. 2 (TMS OUT) or the TMS which is on the secondary ring. values: (TMS OUT) 1, (TMS SECONDARY) 2 Factory default: (TMS OUT) 1 To change this object, load 'fibTmuFunctTmsOnOutPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether the jitter-buster hardware module is in use.
fibTmuFunctUseJitterRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no-jitter-buster", 1), ("use-jitter-buster", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctUseJitterRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctUseJitterRun.setDescription("Indication on whether to use the jitter buster hardware module or not. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. To change this object, load 'fibTmuFunctUseJitterPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Force the RI (ring-in) trunk type to STP instead of optic.
fibTmuFunctForceStpRiRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("do-not-force-ri-stp", 1), ("force-ri-stp", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctForceStpRiRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctForceStpRiRun.setDescription("This object is used to control the type of the trunk (OPTIC or STP). Algorithm: if (optic-module-exists && do-not-force-ri-stp) { ri-trunk-type = OPTIC ; } else { /* * This is the case of: NO-OPTIC-MODULE or * FORCING-THE-TRUNK-TO-STP */ ri-trunk-type = STP ; } Factory default: FALSE (1). To change this object, load 'fibTmuFunctForceStpRiPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Force the RO (ring-out) trunk type to STP instead of optic.
fibTmuFunctForceStpRoRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("do-not-force-ro-stp", 1), ("force-ro-stp", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctForceStpRoRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctForceStpRoRun.setDescription("This object is used to control the type of the trunk (OPTIC or STP). Algorithm: if (optic-module-exists && do-not-force-ro-stp) { ro-trunk-type = OPTIC ; } else { /* * This is the case of: NO-OPTIC-MODULE or * FORCING-THE-TRUNK-TO-STP */ ro-trunk-type = STP ; } Factory default: FALSE (1). To change this object, load 'fibTmuFunctForceStpRoPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Depth of the recovery-history FIFO buffer.
fibTmuFunctMaxSavRecRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctMaxSavRecRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctMaxSavRecRun.setDescription("Parameter indicating how many elements to save in the fifo buffer of containing the history of recovery. Minimal value: 5. maximal value 20. Factory default: 20 To change this object, load 'fibTmuFunctMaxSavRecPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Period (ms) between reads of a TMS error counter (0 = every driver poll).
fibTmuFunctReadPerRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctReadPerRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctReadPerRun.setDescription("Parameter indicating the elapsed time between two readings of the error counter of a TMS. Too large a period may result in counter overflow. A value of '0' indicates 'read every TR driver poll' Units: milli seconds. Minimal value: 0. maximal value 5000. Factory default: 5000. To change this object, load 'fibTmuFunctReadPerPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Number of DMA retries attempted on bus error before giving up.
fibTmuFunctDmaThreshRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctDmaThreshRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctDmaThreshRun.setDescription("Parameter indicating the number of times DMA will attempt retry in case of bus error before giving up. Minimal value: 0. maximal value 30. Factory default: 30. To change this object, load 'fibTmuFunctDmaThreshPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Criterion for removing wrap (1 = not beaconing, 2 = not claiming).
fibTmuFunctRemWrapTypeRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctRemWrapTypeRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRemWrapTypeRun.setDescription("Parameter indicating the criterion for removing 'wrap': It could be removed when the ring is not beaconing and it could be removed when the ring is not claiming. values: (NOT BEACONING) 1, (NOT CLAIMING) 2 Factory default: (NOT BEACONING) 1 To change this object, load 'fibTmuFunctRemWrapTypePerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# Delay (ms) between deciding wrap can be removed and the actual removal.
fibTmuFunctRemWrapLenRun = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fibTmuFunctRemWrapLenRun.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRemWrapLenRun.setDescription("Parameter indicating the elapsed time between the time it is decided that wrap can be removed and the actual removal. See 'fibTmuFunctRemWrapTypeRun'. Units: milli seconds. Minimal value: 0. maximal value 2000. Factory default: 0. To change this object, load 'fibTmuFunctRemWrapLenPerm' into non-volatile ram. The value written into non volatile memory is loaded into the current value only after reset.")
# --- TMU functionality "Perm" parameters (OID 1.3.6.1.4.1.22.60.8.2.*) ---
# Auto-generated pysnmp MIB scalars for the *permanent* (non-volatile RAM)
# copies of the TMU tuning parameters.  These are read-write: a SET stores
# the value in non-volatile memory, and it becomes the current ("Run") value
# only after the next reset.  Status/description texts are attached only
# when mibBuilder.loadTexts is enabled.
# Time (ms) from start of beaconing until the 'autoremove' test starts.
fibTmuFunctBeacon2AutotestPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(160, 200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctBeacon2AutotestPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctBeacon2AutotestPerm.setDescription("The time period for starting the 'autoremove' test after start of beaconing. In units of milliseconds. Minimal value: 160. maximal value 200. Factory default: 160. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Upper bound (ms) from problem removal until ring stops beaconing.
fibTmuFunctBeacon2OkPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(40, 200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctBeacon2OkPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctBeacon2OkPerm.setDescription('The upper bound for time period starting at the removal of a problem till ring status indicates no beaconing. In units of milliseconds. Minimal value: 40. maximal value 200. Factory default: 50. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Upper bound (ms) from problem insertion until ring status shows beaconing.
fibTmuFunctOk2BeaconPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1100, 2500))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctOk2BeaconPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctOk2BeaconPerm.setDescription('The upper bound for time period starting at the insertion of a problem till ring status indicates beaconing. In units of milliseconds. Minimal value: 1100. maximal value 2500. Factory default: 2000. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Settle delay (ms) after changing warp relay state.
fibTmuFunctWrapCwtPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctWrapCwtPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctWrapCwtPerm.setDescription('Time delay to apply, after changing warp relay state. In units of milliseconds. Intended to let TMS get the real ring status. Minimal value: 0. maximal value 200. Factory default: 100. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Whether to release the CPU while waiting after a warp change.
fibTmuFunctWrapWnrPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-release-cpu", 1), ("release-cpu", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctWrapWnrPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctWrapWnrPerm.setDescription('Flag indicating whether to release CPU while waiting after warp change. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Poll time (ms) for a consistent TMU ring status before continuing recovery.
fibTmuFunctRingIstPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctRingIstPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRingIstPerm.setDescription('Time to poll for consistent Tmu ring status before continuing recovery. In units of milliseconds. Minimal value: 0. maximal value 200. Factory default: 0 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Whether to release the CPU while waiting for consistent ring status.
fibTmuFunctRingIstnrPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("istnr-dont-release-cpu", 1), ("istnr-release-cpu", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctRingIstnrPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRingIstnrPerm.setDescription('Flag indicating whether to release CPU while waiting for consistent Tmu ring status. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# BRUTAL recovery: whether to check for beaconing after opening good TAUs.
fibTmuFunctCheckAogTauPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-check-ring", 1), ("check-ring", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctCheckAogTauPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctCheckAogTauPerm.setDescription("Flag is used during BRUTAL recovery (single problem). If TRUE (checck-ring) then check whether the ring is BEACONING after opening the good TAU's (This check takes fibTmuFunctOk2BeaconRun milli sec. and not needed in most cases). Values: 1 - FALSE, 2 - TRUE. Factory default: FALSE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Max number of stations allowed to insert into the ring per round (0 = all).
fibTmuFunctMaxNoiPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 300))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctMaxNoiPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctMaxNoiPerm.setDescription('When several station wants to insert into the ring, this parameter sets the limit on the maximal number of stations which are allowed to insert (other stations are delayed until the next round). If 0 then enable all stations are allowed to insert. Minimal value: 0. maximal value 300. Factory default: 1 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# If TRUE, look for problems among the most recently inserted stations first.
fibTmuFunctLinkPtifPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-link-to-last", 1), ("link-to-last", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctLinkPtifPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctLinkPtifPerm.setDescription('When problem in TMU segment is discovered then if this flag is TRUE, first of all try to find problem between last inserted stations. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Aging time (ms) of the insertion pool.
fibTmuFunctInsPatPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1500, 4000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctInsPatPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctInsPatPerm.setDescription("Parameter for controlling the aging of the 'inseretion-pool' (Should be long enough to make sure that the station is not malfunctioning). Units: milli seconds. Minimal value: 1500. maximal value 4000. Factory default: 3000 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether to run recovery when station count exceeds open-port count.
fibTmuFunctUseMismatchPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-use-mismatch", 1), ("use-mismatch", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctUseMismatchPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctUseMismatchPerm.setDescription('Indication on whether to apply recovery when the number of stations is larger than the number of open ports. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Whether to check ring status for problems caused by inserting stations.
fibTmuFunctChkRingInsPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dont-check-ring", 1), ("check-ring", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctChkRingInsPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctChkRingInsPerm.setDescription("Indication on whether to check ring status (for possible problems caused by the currently inserted stations) or not. Setting 'TRUE' results in quicker discovery of stations with invalid frequency but inflicts higher overhead on legitimate frequency stations. Values: 1 - FALSE, 2 - TRUE. Factory default: FALSE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Time (ms) after insertion before a station is considered o.k.
fibTmuFunctChkRingPerPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1500, 4000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctChkRingPerPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctChkRingPerPerm.setDescription("Parameter indicating the elapsed time after letting a station into the ring after which a station is considered to be o.k. See 'fibTmuFunctChkRingInsPerm' Units: milli seconds. Minimal value: 1500. maximal value 4000. Factory default: 3000 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Delay (ms) between problem detection and start of 'claim' transmission.
fibTmuFunctClaimTimeOutPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(150, 500))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctClaimTimeOutPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctClaimTimeOutPerm.setDescription("Parameter indicating the elapsed time between the detection of a problem and the beginning of sending 'claim'. Units: milli seconds. Minimal value: 150. maximal value 500. Factory default: 250 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether to perform a cross-check before disabling a port.
fibTmuFunctAnotherCheckPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no-another-check", 1), ("another-check", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctAnotherCheckPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctAnotherCheckPerm.setDescription('Indication on whether to perform another check before disabling a port as a cross check. Values: 1 - FALSE, 2 - TRUE. Factory default: FALSE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Which TMS to leave out of the ring on 'warp out' (1 = OUT, 2 = SECONDARY).
fibTmuFunctTmsOnOutPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctTmsOnOutPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctTmsOnOutPerm.setDescription("Parameter indicating the tms to leave out of the ring when carrying out 'warp out': It could be TMS no. 2 (TMS OUT) or the TMS which is on the secondary ring. values: (TMS OUT) 1, (TMS SECONDARY) 2 Factory default: (TMS OUT) 1 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Whether the jitter-buster hardware module is in use.
fibTmuFunctUseJitterPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no-jitter-buster", 1), ("use-jitter-buster", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctUseJitterPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctUseJitterPerm.setDescription('Indication on whether to use the jitter buster hardware module or not. Values: 1 - FALSE, 2 - TRUE. Factory default: TRUE. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Force the RI (ring-in) trunk type to STP instead of optic.
fibTmuFunctForceStpRiPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("do-not-force-ri-stp", 1), ("force-ri-stp", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctForceStpRiPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctForceStpRiPerm.setDescription('This object is used to control the type of the trunk (OPTIC or STP). Algorithm: if (optic-module-exists && do-not-force-ri-stp) { ri-trunk-type = OPTIC ; } else { /* * This is the case of: NO-OPTIC-MODULE or * FORCING-THE-TRUNK-TO-STP */ ri-trunk-type = STP ; } Factory default: FALSE (1). Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Force the RO (ring-out) trunk type to STP instead of optic.
fibTmuFunctForceStpRoPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("do-not-force-ro-stp", 1), ("force-ro-stp", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctForceStpRoPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctForceStpRoPerm.setDescription('This object is used to control the type of the trunk (OPTIC or STP). Algorithm: if (optic-module-exists && do-not-force-ro-stp) { ro-trunk-type = OPTIC ; } else { /* * This is the case of: NO-OPTIC-MODULE or * FORCING-THE-TRUNK-TO-STP */ ro-trunk-type = STP ; } Factory default: FALSE (1). Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Depth of the recovery-history FIFO buffer.
fibTmuFunctMaxSavRecPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctMaxSavRecPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctMaxSavRecPerm.setDescription('Parameter indicating how many elements to save in the fifo buffer of containing the history of recovery. Minimal value: 5. maximal value 20. Factory default: 20 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Period (ms) between reads of a TMS error counter (0 = every driver poll).
fibTmuFunctReadPerPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctReadPerPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctReadPerPerm.setDescription("Parameter indicating the elapsed time between two readings of the error counter of a TMS. Too large a period may result in counter overflow. A value of '0' indicates 'read every TR driver poll' Units: milli seconds. Minimal value: 0. maximal value 5000. Factory default: 5000. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Number of DMA retries attempted on bus error before giving up.
fibTmuFunctDmaThreshPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctDmaThreshPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctDmaThreshPerm.setDescription('Parameter indicating the number of times DMA will attempt retry in case of bus error before giving up. Minimal value: 0. maximal value 30. Factory default: 30. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.')
# Criterion for removing wrap (1 = not beaconing, 2 = not claiming).
fibTmuFunctRemWrapTypePerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctRemWrapTypePerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRemWrapTypePerm.setDescription("Parameter indicating the criterion for removing 'wrap': It could be removed when the ring is not beaconing and it could be removed when the ring is not claiming. values: (NOT BEACONING) 1, (NOT CLAIMING) 2 Factory default: (NOT BEACONING) 1 Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
# Delay (ms) between deciding wrap can be removed and the actual removal.
fibTmuFunctRemWrapLenPerm = MibScalar((1, 3, 6, 1, 4, 1, 22, 60, 8, 2, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fibTmuFunctRemWrapLenPerm.setStatus('mandatory')
if mibBuilder.loadTexts: fibTmuFunctRemWrapLenPerm.setDescription("Parameter indicating the elapsed time between the time it is decided that wrap can be removed and the actual removal. See 'fibTmuFunctRemWrapTypePerm'. Units: milli seconds. Minimal value: 0. maximal value 2000. Factory default: 0. Setting this variable loads it into non volatile memory. The value written into non volatile memory is loaded into the current value only after reset.")
mibBuilder.exportSymbols("TMU-MIB", fibTmuNumRxSpecific=fibTmuNumRxSpecific, fibTmuArpAgeTime=fibTmuArpAgeTime, fibTmuFunctInsPatRun=fibTmuFunctInsPatRun, fibTmuFunctRingIstPerm=fibTmuFunctRingIstPerm, fibTmuMatchIndex=fibTmuMatchIndex, fibTmuMatchNoMatchReason=fibTmuMatchNoMatchReason, fibTmuNumUnknownType=fibTmuNumUnknownType, fibTmuNumRxBdcst=fibTmuNumRxBdcst, fibTmuPortsNumPorts=fibTmuPortsNumPorts, fibTmuIntrfcEntry=fibTmuIntrfcEntry, fibTmuFunctForceStpRiRun=fibTmuFunctForceStpRiRun, fibTmuParityEc=fibTmuParityEc, fibTmuStationsTable=fibTmuStationsTable, fibTmuEepromVersion=fibTmuEepromVersion, fibTmuPortsLastPollTime=fibTmuPortsLastPollTime, fibTmuNumRifLong=fibTmuNumRifLong, fibTmuPortsPortState=fibTmuPortsPortState, fibTmuPortsTauAllowedAddr=fibTmuPortsTauAllowedAddr, fibTmuStationsStationsListValid=fibTmuStationsStationsListValid, fibTmuFunctWrapCwtRun=fibTmuFunctWrapCwtRun, fibTmuProductionFlash0Size=fibTmuProductionFlash0Size, fibTmuFunctBeacon2OkPerm=fibTmuFunctBeacon2OkPerm, fibTmuRunFaultsFormat=fibTmuRunFaultsFormat, fibTmuNumBeaconsRcvd=fibTmuNumBeaconsRcvd, fibTmuPortsTmuPortIndex=fibTmuPortsTmuPortIndex, fibTmuProductionRomType=fibTmuProductionRomType, fibTmuLoadEeDefaults=fibTmuLoadEeDefaults, fibTmuFunctRingIstnrPerm=fibTmuFunctRingIstnrPerm, fibTmuPortsTauAllowedAddrLoaded=fibTmuPortsTauAllowedAddrLoaded, fibTmuMatchPortId=fibTmuMatchPortId, fibTmuPortsTauMode=fibTmuPortsTauMode, fibTmuRxNoiseEc=fibTmuRxNoiseEc, fibTmuFunctMaxSavRecRun=fibTmuFunctMaxSavRecRun, fibTmuFunctWrapCwtPerm=fibTmuFunctWrapCwtPerm, fibTmuSccDrvVersion=fibTmuSccDrvVersion, fibTmuFunctUseJitterPerm=fibTmuFunctUseJitterPerm, fibTmuMatchConfigChipIndex=fibTmuMatchConfigChipIndex, fibTmuMatchNumEntries=fibTmuMatchNumEntries, fibTmuNumNonSnap=fibTmuNumNonSnap, private=private, fibTmuFunctRingIstnrRun=fibTmuFunctRingIstnrRun, fibTmuFunctReadPerRun=fibTmuFunctReadPerRun, fibTmuStationsPhysAddr=fibTmuStationsPhysAddr, fibTmuPortsLedState=fibTmuPortsLedState, 
fibTmuMatchListValid=fibTmuMatchListValid, fibTmuStationsSecondChipIndex=fibTmuStationsSecondChipIndex, fibTmuPortsTmuPort=fibTmuPortsTmuPort, fibTmuFunctChkRingInsPerm=fibTmuFunctChkRingInsPerm, fibTmuDisplayDebugMode=fibTmuDisplayDebugMode, fibTmuFunctWrapWnrPerm=fibTmuFunctWrapWnrPerm, fibTmuFunctAnotherCheckPerm=fibTmuFunctAnotherCheckPerm, enterprises=enterprises, fibTmuPortsAllTable=fibTmuPortsAllTable, tmuStations=tmuStations, fibTmuProductionAddr48No1=fibTmuProductionAddr48No1, fibTmuFunctRtpGrpVrsRun=fibTmuFunctRtpGrpVrsRun, fibTmuNumArpRcvd=fibTmuNumArpRcvd, fibTmuEeFaultsFormat=fibTmuEeFaultsFormat, fibTmuNumRrpRjctIp=fibTmuNumRrpRjctIp, fibTmuNumRxRjctMem=fibTmuNumRxRjctMem, fibTmuMatchNumTmuEntries=fibTmuMatchNumTmuEntries, fibTmuDontResetFatal=fibTmuDontResetFatal, fibTmuFunctBeacon2OkRun=fibTmuFunctBeacon2OkRun, mgmt=mgmt, fibTmuFunctDmaThreshPerm=fibTmuFunctDmaThreshPerm, fibTmuLastBeaconTime=fibTmuLastBeaconTime, fibTmuProductionEepromType=fibTmuProductionEepromType, fibTmuSnmpUsrVersion=fibTmuSnmpUsrVersion, fibTmuPortsAllowedAddrLoaded=fibTmuPortsAllowedAddrLoaded, fibTmuStationsNumTmuEntries=fibTmuStationsNumTmuEntries, fibTmuEepromDeffective=fibTmuEepromDeffective, fibTmuRealTimeClock=fibTmuRealTimeClock, fibTmuAc00Cntr=fibTmuAc00Cntr, fibTmuStationsNumEntries=fibTmuStationsNumEntries, fibTmuPortsUpdateTime=fibTmuPortsUpdateTime, fibTmuPortsTauPortIndex=fibTmuPortsTauPortIndex, fibTmuFunctChkRingInsRun=fibTmuFunctChkRingInsRun, fibTmuProductionBoardType=fibTmuProductionBoardType, fibTmuFunctClaimTimeOutPerm=fibTmuFunctClaimTimeOutPerm, fibTmuFunctRingIstRun=fibTmuFunctRingIstRun, fibTmuNumNoRif=fibTmuNumNoRif, fibTmuStationsFirstChipIndex=fibTmuStationsFirstChipIndex, fibTmuPortsNumPortsPerTmu=fibTmuPortsNumPortsPerTmu, fibTmuIntrfcIndex=fibTmuIntrfcIndex, fibTmuNumStationLeft=fibTmuNumStationLeft, fibTmuFunctCheckAogTauPerm=fibTmuFunctCheckAogTauPerm, fibTmuPortsTauRevision=fibTmuPortsTauRevision, 
fibTmuFunctOk2BeaconPerm=fibTmuFunctOk2BeaconPerm, fibTmuFunctRemWrapLenRun=fibTmuFunctRemWrapLenRun, fibTmuProductionFlash1Size=fibTmuProductionFlash1Size, fibTmuFunctLinkPtifRun=fibTmuFunctLinkPtifRun, fibTmuFunctTmsOnOutPerm=fibTmuFunctTmsOnOutPerm, fibTmuPortsNumTausPerTmu=fibTmuPortsNumTausPerTmu, fibTmuPortsGenClosePerm=fibTmuPortsGenClosePerm, fibTmuSecurityModePerm=fibTmuSecurityModePerm, fibTmuFunctAnotherCheckRun=fibTmuFunctAnotherCheckRun, fibTmuAc01Cntr=fibTmuAc01Cntr, fibTmuProductionRamType=fibTmuProductionRamType, fibTmuNumProcessDisabled=fibTmuNumProcessDisabled, fibTmuMatchSecondChipIndex=fibTmuMatchSecondChipIndex, fibTmuStationsUpToDate=fibTmuStationsUpToDate, fibTmuProductionAddr48No2=fibTmuProductionAddr48No2, fibTmuMatchEntry=fibTmuMatchEntry, fibTmuFunctClaimTimeOutRun=fibTmuFunctClaimTimeOutRun, fibTmuFunctLinkPtifPerm=fibTmuFunctLinkPtifPerm, fibTmuMatchTable=fibTmuMatchTable, fibTmuStationsEntry=fibTmuStationsEntry, fibTmuFunctRemWrapTypeRun=fibTmuFunctRemWrapTypeRun, fibTmuFunctBeacon2AutotestRun=fibTmuFunctBeacon2AutotestRun, fibTmuSecurityModeRun=fibTmuSecurityModeRun, fibTmuNumStationSecurity=fibTmuNumStationSecurity, fibTmuPortsNumPortsPerTau=fibTmuPortsNumPortsPerTau, fibTmuStationsUpdateTime=fibTmuStationsUpdateTime, fibTmuNumIfDown=fibTmuNumIfDown, fibTmuPortsNumModules=fibTmuPortsNumModules, fibTmuPortsTauTable=fibTmuPortsTauTable, fibTmuFunctForceStpRiPerm=fibTmuFunctForceStpRiPerm, tmuSecurity=tmuSecurity, fibTmuMatchActMonIndex=fibTmuMatchActMonIndex, fibronics=fibronics, fibTmuPortsTauEntry=fibTmuPortsTauEntry, fibTmuTrDrvVersion=fibTmuTrDrvVersion, fibTmuFunctCheckAogTauRun=fibTmuFunctCheckAogTauRun, fibTmuProductionHwInfo=fibTmuProductionHwInfo, fibTmuFunctTmsOnOutRun=fibTmuFunctTmsOnOutRun, fibTmuMatchUpdateTime=fibTmuMatchUpdateTime, fibTmuUtilitiesVersion=fibTmuUtilitiesVersion, fibTmuProductionRamSize=fibTmuProductionRamSize, fibTmuPortsModuleId=fibTmuPortsModuleId, fibTmuAc10Cntr=fibTmuAc10Cntr, 
fibTmuMainSwVersion=fibTmuMainSwVersion, fibTmuMatchFirstChipIndex=fibTmuMatchFirstChipIndex, fibTmuFunctReadPerPerm=fibTmuFunctReadPerPerm, fibTmuLastBeaconNaun=fibTmuLastBeaconNaun, fibTmuNumRifIncluded=fibTmuNumRifIncluded, fibTmuMatchUpToDate=fibTmuMatchUpToDate, fibTmuAc11Cntr=fibTmuAc11Cntr, fibTmuNumIpRcvd=fibTmuNumIpRcvd, fibTmuPortsManagerClosePerm=fibTmuPortsManagerClosePerm, fibTmuPortsMaxNumTauErrs=fibTmuPortsMaxNumTauErrs, fibTmuRIConnection=fibTmuRIConnection, fibTmuNumRxAccepted=fibTmuNumRxAccepted, fibTmuProductionAddr48No3=fibTmuProductionAddr48No3, fibTmuPortsNumPhantomUp=fibTmuPortsNumPhantomUp, fibTmuBreakEc=fibTmuBreakEc, fibTmuPortsLastTxTime=fibTmuPortsLastTxTime, fibTmuFunctInsPatPerm=fibTmuFunctInsPatPerm, fibTmuPortsNumPhantomPresent=fibTmuPortsNumPhantomPresent, fibTmuMatchTauId=fibTmuMatchTauId, fibTmuIntrfcTable=fibTmuIntrfcTable, fibTmuPortsStationInfo=fibTmuPortsStationInfo, fibTmuWrapIn=fibTmuWrapIn, fibTmuPortsPortStatus=fibTmuPortsPortStatus, fibTmuPortsTauPortState=fibTmuPortsTauPortState, fibTmuStationsIndex=fibTmuStationsIndex, fibTmuPortsPhysAddr=fibTmuPortsPhysAddr, fibTmuFunctBeacon2AutotestPerm=fibTmuFunctBeacon2AutotestPerm, fibTmuMatchPhysAddr=fibTmuMatchPhysAddr, fibTmuROConnection=fibTmuROConnection, fibTmuFrameEc=fibTmuFrameEc, tmuMatch=tmuMatch, fibTmuFunctForceStpRoRun=fibTmuFunctForceStpRoRun, fibTmuPortsMaxFirstTimeout=fibTmuPortsMaxFirstTimeout, tmu=tmu, fibTmuNumAddrNotFound=fibTmuNumAddrNotFound, fibTmuFunctRemWrapLenPerm=fibTmuFunctRemWrapLenPerm, fibTmuFunctWrapWnrRun=fibTmuFunctWrapWnrRun, fibTmuPortsNumRelayOpen=fibTmuPortsNumRelayOpen, fibTmuNumConfigNotValid=fibTmuNumConfigNotValid, fibTmuLastBeaconAddr=fibTmuLastBeaconAddr, fibTmuMatchStationInfo=fibTmuMatchStationInfo, fibTmuNumEventErrs=fibTmuNumEventErrs, fibTmuLastBeaconType=fibTmuLastBeaconType, fibTmuFunctForceStpRoPerm=fibTmuFunctForceStpRoPerm, fibTmuNumRxGroup=fibTmuNumRxGroup, fibTmuFunctionalityVersion=fibTmuFunctionalityVersion, 
tmuFunction=tmuFunction, fibTmuFunctUseMismatchPerm=fibTmuFunctUseMismatchPerm, fibTmuProductionFlashType=fibTmuProductionFlashType, fibTmuNumOwnBrdcst=fibTmuNumOwnBrdcst, fibTmuProductionEepromSize=fibTmuProductionEepromSize, fibTmuFunctDmaThreshRun=fibTmuFunctDmaThreshRun, fibTmuFunctChkRingPerPerm=fibTmuFunctChkRingPerPerm, fibTmuNumRarpUpdate=fibTmuNumRarpUpdate, fibTmuLastSysIfIndex=fibTmuLastSysIfIndex, tmuProduction=tmuProduction, fibTmuNumRarpRcvd=fibTmuNumRarpRcvd, fibTmuPortsNumTimeout=fibTmuPortsNumTimeout, fibTmuStationsConfigChipIndex=fibTmuStationsConfigChipIndex, fibTmuStationsStationInfo=fibTmuStationsStationInfo, fibTmuPortsRequestType=fibTmuPortsRequestType, fibTmuPortsAllowedAddr=fibTmuPortsAllowedAddr, fibTmuProductionSerialNum=fibTmuProductionSerialNum, fibTmuPortsNumConsequentErrs=fibTmuPortsNumConsequentErrs, fibTmuPortsGenCloseRun=fibTmuPortsGenCloseRun, fibTmuPortsNumAttached=fibTmuPortsNumAttached, fibTmuFunctUseJitterRun=fibTmuFunctUseJitterRun, fibTmuStationsActMonIndex=fibTmuStationsActMonIndex, fibTmuFunctChkRingPerRun=fibTmuFunctChkRingPerRun, fibTmuFunctMaxNoiRun=fibTmuFunctMaxNoiRun, fibTmuFunctMaxSavRecPerm=fibTmuFunctMaxSavRecPerm, fibTmuNumRxFunctional=fibTmuNumRxFunctional, fibTmuFunctOk2BeaconRun=fibTmuFunctOk2BeaconRun, fibTmuPortsPortType=fibTmuPortsPortType, fibTmuPortsManagerCloseRun=fibTmuPortsManagerCloseRun, fibTmuFunctMaxNoiPerm=fibTmuFunctMaxNoiPerm, tmuSystem=tmuSystem, fibTmuPortsEndConnection=fibTmuPortsEndConnection, fibTmuFunctRemWrapTypePerm=fibTmuFunctRemWrapTypePerm, fibTmuWrapOut=fibTmuWrapOut, fibTmuFunctUseMismatchRun=fibTmuFunctUseMismatchRun, fibTmuMatchNoStationsListReason=fibTmuMatchNoStationsListReason, fibTmuPortsAllEntry=fibTmuPortsAllEntry, tmuPorts=tmuPorts, fibTmuFunctRtpGrpVrsPerm=fibTmuFunctRtpGrpVrsPerm, tmuIntrfc=tmuIntrfc)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
515b90f6751473f9ac482175b57299289a9c2859 | 2b3365bf01a910436edaf1448f182549ffd44525 | /_12_wikipedia.py | 6ca92cc80335bc89912f23a7a4b58cc5477fcc0a | [] | no_license | wlgud0402/croller | 2f1cb93f969d87890d2fb35992833ce3070c95dc | 56339880081042a25b8057caaca67f64a1f20df5 | refs/heads/master | 2021-05-20T23:29:57.503376 | 2020-04-29T06:41:06 | 2020-04-29T06:41:06 | 252,453,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | #위키백과 수집과 출력이 가능한 프로그램
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
pages = set()
def getLinks(pageUrl):
global pages
html = urlopen('http://en.wikipedia.org' + pageUrl)
bs = BeautifulSoup(html, 'html.parser')
try:
print(bs.h1.get_text()) #태그없이 안의 텍스트만 출력해주는 함수
print(bs.find(id = 'mw-content-text').findAll('p')[0])
print(bs.find(id = 'ca-edit').find('span').find('a').attrs['href'])
except AttributeError:
print('This page is missing something! No worries though!')
for link in bs.findAll('a', href = re.compile('^(/wiki/)')):
if 'href' in link.attrs:
if link.attrs['href'] not in pages:
newPage = link.attrs['href']
print('-----------\n' + newPage)
pages.add(newPage)
getLinks(newPage)
getLinks('') | [
"wlgudrlgus@naver.com"
] | wlgudrlgus@naver.com |
786752a2164cf9e296256aef2ffb9e724b2159c2 | 6ead0d3997aa3470fc6f49c6ccc0ac8f808ae5d7 | /problems/python/tests/test_topKFrequent.py | ff3276cafa50cdee80f65c10f1b42103e8ab649e | [] | no_license | ikedaosushi/leetcode | d405455bfffda3057259da78783901feb56d9f76 | d378f2dc5f0b2df1f00208e304979ac0f53ab385 | refs/heads/master | 2021-06-24T04:31:56.586685 | 2020-12-08T13:51:18 | 2020-12-08T13:51:18 | 178,659,078 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | import pytest
from topKFrequent import Solution
@pytest.mark.parametrize("nums, k, expected", [
([1, 1, 1, 2, 2, 3], 2, [1, 2]),
([1], 1, [1]),
])
def test_topKFrequent(nums, k, expected):
actual = Solution().topKFrequent(nums, k)
assert actual == expected
| [
"yutaro.ikeda@kaizenplatform.com"
] | yutaro.ikeda@kaizenplatform.com |
a4ef4b4d7cd9ed4952e319fab4b7a38c79db702c | c7cbbd4b1c1e281cef5f4a0c4e3d4a97cee2241e | /froide/document/models.py | 5a8eb1e966fd65554b6679d2fd3c8159cf5c00d1 | [
"MIT"
] | permissive | manonthemat/froide | 078cf78a6eb35226512c0bdfa2ac9043bcc81ad9 | 698c49935eaf2e922f3c9f6a46af0fd545ccbbbb | refs/heads/master | 2020-08-14T08:19:36.215473 | 2019-10-14T19:43:16 | 2019-10-14T19:43:16 | 215,129,869 | 0 | 0 | MIT | 2019-10-14T19:35:49 | 2019-10-14T19:35:49 | null | UTF-8 | Python | false | false | 2,203 | py | from django.db import models
from filingcabinet.models import (
AbstractDocument,
AbstractDocumentCollection,
get_page_image_filename
)
class Document(AbstractDocument):
original = models.ForeignKey(
'foirequest.FoiAttachment', null=True, blank=True,
on_delete=models.SET_NULL, related_name='original_document'
)
foirequest = models.ForeignKey(
'foirequest.FoiRequest', null=True, blank=True,
on_delete=models.SET_NULL
)
publicbody = models.ForeignKey(
'publicbody.PublicBody', null=True, blank=True,
on_delete=models.SET_NULL
)
team = models.ForeignKey(
'team.Team', null=True, blank=True,
on_delete=models.SET_NULL
)
def is_public(self):
return self.public
def get_serializer_class(self, detail=False):
from .api_views import DocumentSerializer, DocumentDetailSerializer
if detail:
return DocumentDetailSerializer
return DocumentSerializer
def get_crossdomain_auth(self, filename=None):
from .auth import DocumentCrossDomainMediaAuth
if filename is None:
filename = self.get_document_filename()
return DocumentCrossDomainMediaAuth({
'object': self,
'filename': filename
})
def get_authorized_file_url(self, filename=None):
if self.public:
return self.get_file_url(filename=filename)
return self.get_crossdomain_auth(filename=filename).get_full_media_url(
authorized=True
)
def get_page_template(self):
return self.get_authorized_file_url(filename=get_page_image_filename())
def get_cover_image(self):
return self.get_authorized_file_url(filename=get_page_image_filename(
page=1, size='small'
))
class DocumentCollection(AbstractDocumentCollection):
team = models.ForeignKey(
'team.Team', null=True, blank=True,
on_delete=models.SET_NULL
)
def is_public(self):
return self.public
def get_serializer_class(self):
from .api_views import DocumentCollectionSerializer
return DocumentCollectionSerializer
| [
"mail@stefanwehrmeyer.com"
] | mail@stefanwehrmeyer.com |
d36495eda9b83ba3f4c7f79595a3a843826a4cc7 | 918382f9515dd37c0815f81f64f954a4f78668ab | /env/lib/python3.6/operator.py | 9f2d23ae250dfed65fbcbb35a4dafcc28a26dec5 | [] | no_license | BwanaQ/django-hekalu | 38d47fac421d91553cc8cd86dd4dff972b78a5d6 | 614312eff054db58dfa06f78a7f9bb3d2c8ee519 | refs/heads/master | 2023-08-03T03:12:38.091514 | 2020-04-25T07:12:42 | 2020-04-25T07:12:42 | 258,124,464 | 0 | 0 | null | 2021-09-22T18:55:06 | 2020-04-23T07:15:34 | JavaScript | UTF-8 | Python | false | false | 47 | py | /home/hunja/anaconda3/lib/python3.6/operator.py | [
"thunjawax@gmail.com"
] | thunjawax@gmail.com |
61b14f9bef79acaf12332a76c4bd2bd064d76dd6 | 0d8486c1d55c40bebea7c5428930f18165d2d0e9 | /tests/asp/AllAnswerSets/tight/graph.colouring.3.40.5_9.asp.test.py | a44c3c0c27b04eb04ff675f2b1c10ac44475d599 | [
"Apache-2.0"
] | permissive | bernardocuteri/wasp | 6f81bf6aa8fb273c91bbf68ecce4ecb195a55953 | 05c8f961776dbdbf7afbf905ee00fc262eba51ad | refs/heads/master | 2021-06-08T11:58:25.080818 | 2020-10-05T16:57:37 | 2020-10-05T16:57:37 | 124,245,808 | 0 | 0 | Apache-2.0 | 2018-03-07T14:13:16 | 2018-03-07T14:13:16 | null | UTF-8 | Python | false | false | 24,223 | py | input = """
1 2 0 0
1 3 0 0
1 4 0 0
1 5 0 0
1 6 0 0
1 7 0 0
1 8 0 0
1 9 0 0
1 10 0 0
1 11 0 0
1 12 0 0
1 13 0 0
1 14 0 0
1 15 0 0
1 16 0 0
1 17 0 0
1 18 0 0
1 19 0 0
1 20 0 0
1 21 0 0
1 22 0 0
1 23 0 0
1 24 0 0
1 25 0 0
1 26 0 0
1 27 0 0
1 28 0 0
1 29 0 0
1 30 0 0
1 31 0 0
1 32 0 0
1 33 0 0
1 34 0 0
1 35 0 0
1 36 0 0
1 37 0 0
1 38 0 0
1 39 0 0
1 40 0 0
1 41 0 0
1 42 0 0
1 43 0 0
1 44 0 0
1 45 0 0
1 46 0 0
1 47 0 0
1 48 0 0
1 49 0 0
1 50 0 0
1 51 0 0
1 52 0 0
1 53 0 0
1 54 0 0
1 55 0 0
1 56 0 0
1 57 0 0
1 58 0 0
1 59 0 0
1 60 0 0
1 61 0 0
1 62 0 0
1 63 0 0
1 64 0 0
1 65 0 0
1 66 0 0
1 67 0 0
1 68 0 0
1 69 0 0
1 70 0 0
1 71 0 0
1 72 0 0
1 73 0 0
1 74 0 0
1 75 0 0
1 76 0 0
1 77 0 0
1 78 0 0
1 79 0 0
1 80 0 0
1 81 0 0
1 82 0 0
1 83 0 0
1 84 0 0
1 85 0 0
1 86 0 0
1 87 0 0
1 88 0 0
1 89 0 0
1 90 0 0
1 91 0 0
1 92 0 0
1 93 0 0
1 94 0 0
1 95 0 0
1 96 0 0
1 97 0 0
1 98 0 0
1 99 0 0
1 100 0 0
1 101 0 0
1 102 0 0
1 103 0 0
1 104 0 0
1 105 0 0
1 106 0 0
1 107 0 0
1 108 0 0
1 109 0 0
1 110 0 0
1 111 0 0
1 112 0 0
1 113 0 0
1 114 0 0
1 115 0 0
1 116 0 0
1 117 0 0
1 118 0 0
1 119 0 0
1 120 0 0
1 121 0 0
1 122 0 0
1 123 0 0
1 124 0 0
1 125 0 0
1 126 0 0
1 127 0 0
1 128 0 0
1 129 0 0
1 130 0 0
1 131 0 0
1 132 0 0
1 133 0 0
1 134 0 0
1 135 0 0
1 136 0 0
1 137 0 0
1 138 0 0
1 139 0 0
1 140 0 0
1 141 0 0
1 142 0 0
1 143 0 0
1 144 0 0
1 145 0 0
1 146 0 0
1 147 0 0
1 148 0 0
1 149 0 0
1 150 0 0
1 151 0 0
1 152 0 0
1 153 0 0
1 154 0 0
1 155 0 0
1 156 0 0
1 157 0 0
1 158 0 0
1 159 0 0
1 160 0 0
1 161 0 0
1 162 0 0
1 163 0 0
1 164 0 0
1 165 0 0
1 166 0 0
1 167 0 0
1 168 0 0
1 169 0 0
1 170 0 0
1 171 0 0
1 172 0 0
1 173 0 0
1 174 0 0
1 175 0 0
1 176 0 0
1 177 0 0
1 178 0 0
1 179 0 0
1 180 0 0
1 181 0 0
1 182 0 0
1 183 0 0
1 184 0 0
1 185 0 0
1 186 0 0
1 187 0 0
1 188 0 0
1 189 0 0
1 190 0 0
1 191 0 0
1 192 0 0
1 193 0 0
1 194 0 0
1 195 0 0
1 196 0 0
1 197 0 0
1 198 0 0
1 199 0 0
1 200 0 0
1 201 0 0
1 202 0 0
1 203 0 0
1 204 0 0
1 205 0 0
1 206 0 0
1 207 0 0
1 208 0 0
1 209 0 0
1 210 0 0
1 211 0 0
1 212 0 0
1 213 0 0
1 214 0 0
1 215 0 0
1 216 0 0
1 217 0 0
1 218 0 0
1 219 0 0
1 220 0 0
1 221 0 0
1 222 0 0
1 223 0 0
1 224 0 0
1 225 0 0
1 226 0 0
1 227 0 0
1 228 0 0
1 229 0 0
1 230 0 0
1 231 0 0
1 232 0 0
1 233 0 0
1 234 0 0
1 235 0 0
1 236 0 0
1 237 0 0
1 238 0 0
1 239 0 0
1 240 0 0
1 241 0 0
1 242 0 0
1 243 0 0
1 244 0 0
1 245 0 0
1 246 0 0
1 247 0 0
1 248 0 0
1 249 0 0
1 250 0 0
1 251 0 0
1 252 0 0
1 253 0 0
1 254 0 0
1 255 0 0
1 256 0 0
1 257 0 0
1 258 0 0
1 259 0 0
1 260 0 0
1 261 0 0
1 262 0 0
1 263 0 0
1 264 0 0
1 265 0 0
1 266 0 0
1 267 0 0
1 268 0 0
1 269 0 0
1 270 0 0
1 271 0 0
1 272 0 0
1 273 0 0
1 274 0 0
1 275 0 0
1 276 0 0
1 277 0 0
1 278 0 0
1 279 0 0
1 280 0 0
1 281 0 0
1 282 0 0
1 283 0 0
1 284 0 0
1 285 0 0
1 286 0 0
1 287 0 0
1 288 0 0
1 289 0 0
1 290 0 0
1 291 0 0
1 292 0 0
1 293 0 0
1 294 0 0
1 295 0 0
1 296 0 0
1 297 0 0
1 298 0 0
1 299 0 0
1 300 0 0
1 301 0 0
1 302 0 0
1 303 0 0
1 304 0 0
1 305 0 0
1 306 0 0
1 307 0 0
1 308 0 0
1 309 0 0
1 310 0 0
1 311 0 0
1 312 0 0
1 313 0 0
1 314 0 0
1 315 0 0
1 316 0 0
1 317 0 0
1 318 0 0
1 319 0 0
1 320 0 0
1 321 0 0
1 322 0 0
1 323 0 0
1 324 0 0
1 325 0 0
1 326 0 0
1 327 0 0
1 328 0 0
1 329 0 0
1 330 0 0
1 331 0 0
1 332 0 0
1 333 0 0
1 334 0 0
1 335 0 0
1 336 0 0
1 337 0 0
1 338 0 0
1 339 0 0
1 340 2 1 341 342
1 341 2 1 340 342
1 342 0 0
1 343 2 1 344 345
1 344 2 1 343 345
1 345 0 0
1 346 2 1 347 348
1 347 2 1 346 348
1 348 0 0
1 349 2 1 350 351
1 350 2 1 349 351
1 351 0 0
1 352 2 1 353 354
1 353 2 1 352 354
1 354 0 0
1 355 2 1 356 357
1 356 2 1 355 357
1 357 0 0
1 358 2 1 359 360
1 359 2 1 358 360
1 360 0 0
1 361 2 1 362 363
1 362 2 1 361 363
1 363 0 0
1 364 2 1 365 366
1 365 2 1 364 366
1 366 0 0
1 367 2 1 368 369
1 368 2 1 367 369
1 369 0 0
1 370 2 1 371 372
1 371 2 1 370 372
1 372 0 0
1 373 2 1 374 375
1 374 2 1 373 375
1 375 0 0
1 376 2 1 377 378
1 377 2 1 376 378
1 378 0 0
1 379 2 1 380 381
1 380 2 1 379 381
1 381 0 0
1 382 2 1 383 384
1 383 2 1 382 384
1 384 0 0
1 385 2 1 386 387
1 386 2 1 385 387
1 387 0 0
1 388 2 1 389 390
1 389 2 1 388 390
1 390 0 0
1 391 2 1 392 393
1 392 2 1 391 393
1 393 0 0
1 394 2 1 395 396
1 395 2 1 394 396
1 396 0 0
1 397 2 1 398 399
1 398 2 1 397 399
1 399 0 0
1 400 2 1 401 402
1 401 2 1 400 402
1 402 0 0
1 403 2 1 404 405
1 404 2 1 403 405
1 405 0 0
1 406 2 1 407 408
1 407 2 1 406 408
1 408 0 0
1 409 2 1 410 411
1 410 2 1 409 411
1 411 0 0
1 412 2 1 413 414
1 413 2 1 412 414
1 414 0 0
1 415 2 1 416 417
1 416 2 1 415 417
1 417 0 0
1 418 2 1 419 420
1 419 2 1 418 420
1 420 0 0
1 421 2 1 422 423
1 422 2 1 421 423
1 423 0 0
1 424 2 1 425 426
1 425 2 1 424 426
1 426 0 0
1 427 2 1 428 429
1 428 2 1 427 429
1 429 0 0
1 430 2 1 431 432
1 431 2 1 430 432
1 432 0 0
1 433 2 1 434 435
1 434 2 1 433 435
1 435 0 0
1 436 2 1 437 438
1 437 2 1 436 438
1 438 0 0
1 439 2 1 440 441
1 440 2 1 439 441
1 441 0 0
1 442 2 1 443 444
1 443 2 1 442 444
1 444 0 0
1 445 2 1 446 447
1 446 2 1 445 447
1 447 0 0
1 448 2 1 449 450
1 449 2 1 448 450
1 450 0 0
1 451 2 1 452 453
1 452 2 1 451 453
1 453 0 0
1 454 2 1 455 456
1 455 2 1 454 456
1 456 0 0
1 457 2 1 458 459
1 458 2 1 457 459
1 459 0 0
1 460 2 1 461 462
1 461 2 1 460 462
1 462 0 0
1 463 2 1 464 465
1 464 2 1 463 465
1 465 0 0
1 466 2 1 467 468
1 467 2 1 466 468
1 468 0 0
1 469 2 1 470 471
1 470 2 1 469 471
1 471 0 0
1 472 2 1 473 474
1 473 2 1 472 474
1 474 0 0
1 475 2 1 476 477
1 476 2 1 475 477
1 477 0 0
1 478 2 1 479 480
1 479 2 1 478 480
1 480 0 0
1 481 2 1 482 483
1 482 2 1 481 483
1 483 0 0
1 484 2 1 485 486
1 485 2 1 484 486
1 486 0 0
1 487 2 1 488 489
1 488 2 1 487 489
1 489 0 0
1 490 2 1 491 492
1 491 2 1 490 492
1 492 0 0
1 493 2 1 494 495
1 494 2 1 493 495
1 495 0 0
1 496 2 1 497 498
1 497 2 1 496 498
1 498 0 0
1 499 2 1 500 501
1 500 2 1 499 501
1 501 0 0
1 502 2 1 503 504
1 503 2 1 502 504
1 504 0 0
1 505 2 1 506 507
1 506 2 1 505 507
1 507 0 0
1 508 2 1 509 510
1 509 2 1 508 510
1 510 0 0
1 511 2 1 512 513
1 512 2 1 511 513
1 513 0 0
1 514 2 1 515 516
1 515 2 1 514 516
1 516 0 0
1 517 2 1 518 519
1 518 2 1 517 519
1 519 0 0
1 520 2 1 521 522
1 521 2 1 520 522
1 522 0 0
1 523 2 1 524 525
1 524 2 1 523 525
1 525 0 0
1 526 2 1 527 528
1 527 2 1 526 528
1 528 0 0
1 529 2 1 530 531
1 530 2 1 529 531
1 531 0 0
1 532 2 1 533 534
1 533 2 1 532 534
1 534 0 0
1 535 2 1 536 537
1 536 2 1 535 537
1 537 0 0
1 538 2 1 539 540
1 539 2 1 538 540
1 540 0 0
1 541 2 1 542 543
1 542 2 1 541 543
1 543 0 0
1 544 2 1 545 546
1 545 2 1 544 546
1 546 0 0
1 547 2 1 548 549
1 548 2 1 547 549
1 549 0 0
1 550 2 1 551 552
1 551 2 1 550 552
1 552 0 0
1 553 2 1 554 555
1 554 2 1 553 555
1 555 0 0
1 556 2 1 557 558
1 557 2 1 556 558
1 558 0 0
1 559 2 1 560 561
1 560 2 1 559 561
1 561 0 0
1 562 2 1 563 564
1 563 2 1 562 564
1 564 0 0
1 565 2 1 566 567
1 566 2 1 565 567
1 567 0 0
1 568 2 1 569 570
1 569 2 1 568 570
1 570 0 0
1 571 2 1 572 573
1 572 2 1 571 573
1 573 0 0
1 574 2 1 575 576
1 575 2 1 574 576
1 576 0 0
1 577 2 1 578 579
1 578 2 1 577 579
1 579 0 0
1 580 2 1 581 582
1 581 2 1 580 582
1 582 0 0
1 583 2 1 584 585
1 584 2 1 583 585
1 585 0 0
1 586 2 1 587 588
1 587 2 1 586 588
1 588 0 0
1 589 2 1 590 591
1 590 2 1 589 591
1 591 0 0
1 592 2 1 593 594
1 593 2 1 592 594
1 594 0 0
1 595 2 1 596 597
1 596 2 1 595 597
1 597 0 0
1 598 2 1 599 600
1 599 2 1 598 600
1 600 0 0
1 601 2 1 602 603
1 602 2 1 601 603
1 603 0 0
1 604 2 1 605 606
1 605 2 1 604 606
1 606 0 0
1 607 2 1 608 609
1 608 2 1 607 609
1 609 0 0
1 610 2 1 611 612
1 611 2 1 610 612
1 612 0 0
1 613 2 1 614 615
1 614 2 1 613 615
1 615 0 0
1 616 2 1 617 618
1 617 2 1 616 618
1 618 0 0
1 619 2 1 620 621
1 620 2 1 619 621
1 621 0 0
1 622 2 1 623 624
1 623 2 1 622 624
1 624 0 0
1 625 2 1 626 627
1 626 2 1 625 627
1 627 0 0
1 628 2 1 629 630
1 629 2 1 628 630
1 630 0 0
1 631 2 1 632 633
1 632 2 1 631 633
1 633 0 0
1 634 2 1 635 636
1 635 2 1 634 636
1 636 0 0
1 637 2 1 638 639
1 638 2 1 637 639
1 639 0 0
1 640 2 1 641 642
1 641 2 1 640 642
1 642 0 0
1 643 2 1 644 645
1 644 2 1 643 645
1 645 0 0
1 646 2 1 647 648
1 647 2 1 646 648
1 648 0 0
1 649 2 1 650 651
1 650 2 1 649 651
1 651 0 0
1 652 2 1 653 654
1 653 2 1 652 654
1 654 0 0
1 655 2 1 656 657
1 656 2 1 655 657
1 657 0 0
1 658 2 1 659 660
1 659 2 1 658 660
1 660 0 0
1 661 2 1 662 663
1 662 2 1 661 663
1 663 0 0
1 664 2 1 665 666
1 665 2 1 664 666
1 666 0 0
1 667 2 1 668 669
1 668 2 1 667 669
1 669 0 0
1 670 2 1 671 672
1 671 2 1 670 672
1 672 0 0
1 673 2 1 674 675
1 674 2 1 673 675
1 675 0 0
1 676 2 1 677 678
1 677 2 1 676 678
1 678 0 0
1 679 2 1 680 681
1 680 2 1 679 681
1 681 0 0
1 682 2 1 683 684
1 683 2 1 682 684
1 684 0 0
1 685 2 1 686 687
1 686 2 1 685 687
1 687 0 0
1 688 2 1 689 690
1 689 2 1 688 690
1 690 0 0
1 691 2 1 692 693
1 692 2 1 691 693
1 693 0 0
1 694 2 1 695 696
1 695 2 1 694 696
1 696 0 0
1 697 2 1 698 699
1 698 2 1 697 699
1 699 0 0
1 700 1 0 697
1 701 1 0 694
1 702 1 0 691
1 703 1 0 688
1 704 1 0 685
1 705 1 0 682
1 706 1 0 679
1 707 1 0 676
1 708 1 0 673
1 709 1 0 670
1 710 1 0 667
1 711 1 0 664
1 712 1 0 661
1 713 1 0 658
1 714 1 0 655
1 715 1 0 652
1 716 1 0 649
1 717 1 0 646
1 718 1 0 643
1 719 1 0 640
1 720 1 0 637
1 721 1 0 634
1 722 1 0 631
1 723 1 0 628
1 724 1 0 625
1 725 1 0 622
1 726 1 0 619
1 727 1 0 616
1 728 1 0 613
1 729 1 0 610
1 730 1 0 607
1 731 1 0 604
1 732 1 0 601
1 733 1 0 598
1 734 1 0 595
1 735 1 0 592
1 736 1 0 589
1 737 1 0 586
1 738 1 0 583
1 739 1 0 580
1 700 1 0 577
1 701 1 0 574
1 702 1 0 571
1 703 1 0 568
1 704 1 0 565
1 705 1 0 562
1 706 1 0 559
1 707 1 0 556
1 708 1 0 553
1 709 1 0 550
1 710 1 0 547
1 711 1 0 544
1 712 1 0 541
1 713 1 0 538
1 714 1 0 535
1 715 1 0 532
1 716 1 0 529
1 717 1 0 526
1 718 1 0 523
1 719 1 0 520
1 720 1 0 517
1 721 1 0 514
1 722 1 0 511
1 723 1 0 508
1 724 1 0 505
1 725 1 0 502
1 726 1 0 499
1 727 1 0 496
1 728 1 0 493
1 729 1 0 490
1 730 1 0 487
1 731 1 0 484
1 732 1 0 481
1 733 1 0 478
1 734 1 0 475
1 735 1 0 472
1 736 1 0 469
1 737 1 0 466
1 738 1 0 463
1 739 1 0 460
1 700 1 0 457
1 701 1 0 454
1 702 1 0 451
1 703 1 0 448
1 704 1 0 445
1 705 1 0 442
1 706 1 0 439
1 707 1 0 436
1 708 1 0 433
1 709 1 0 430
1 710 1 0 427
1 711 1 0 424
1 712 1 0 421
1 713 1 0 418
1 714 1 0 415
1 715 1 0 412
1 716 1 0 409
1 717 1 0 406
1 718 1 0 403
1 719 1 0 400
1 720 1 0 397
1 721 1 0 394
1 722 1 0 391
1 723 1 0 388
1 724 1 0 385
1 725 1 0 382
1 726 1 0 379
1 727 1 0 376
1 728 1 0 373
1 729 1 0 370
1 730 1 0 367
1 731 1 0 364
1 732 1 0 361
1 733 1 0 358
1 734 1 0 355
1 735 1 0 352
1 736 1 0 349
1 737 1 0 346
1 738 1 0 343
1 739 1 0 340
1 1 1 1 739
1 1 1 1 738
1 1 1 1 737
1 1 1 1 736
1 1 1 1 735
1 1 1 1 734
1 1 1 1 733
1 1 1 1 732
1 1 1 1 731
1 1 1 1 730
1 1 1 1 729
1 1 1 1 728
1 1 1 1 727
1 1 1 1 726
1 1 1 1 725
1 1 1 1 724
1 1 1 1 723
1 1 1 1 722
1 1 1 1 721
1 1 1 1 720
1 1 1 1 719
1 1 1 1 718
1 1 1 1 717
1 1 1 1 716
1 1 1 1 715
1 1 1 1 714
1 1 1 1 713
1 1 1 1 712
1 1 1 1 711
1 1 1 1 710
1 1 1 1 709
1 1 1 1 708
1 1 1 1 707
1 1 1 1 706
1 1 1 1 705
1 1 1 1 704
1 1 1 1 703
1 1 1 1 702
1 1 1 1 701
1 1 1 1 700
1 1 2 0 697 577
1 1 2 0 697 457
1 1 2 0 694 574
1 1 2 0 694 454
1 1 2 0 691 571
1 1 2 0 691 451
1 1 2 0 688 568
1 1 2 0 688 448
1 1 2 0 685 565
1 1 2 0 685 445
1 1 2 0 682 562
1 1 2 0 682 442
1 1 2 0 679 559
1 1 2 0 679 439
1 1 2 0 676 556
1 1 2 0 676 436
1 1 2 0 673 553
1 1 2 0 673 433
1 1 2 0 670 550
1 1 2 0 670 430
1 1 2 0 667 547
1 1 2 0 667 427
1 1 2 0 664 544
1 1 2 0 664 424
1 1 2 0 661 541
1 1 2 0 661 421
1 1 2 0 658 538
1 1 2 0 658 418
1 1 2 0 655 535
1 1 2 0 655 415
1 1 2 0 652 532
1 1 2 0 652 412
1 1 2 0 649 529
1 1 2 0 649 409
1 1 2 0 646 526
1 1 2 0 646 406
1 1 2 0 643 523
1 1 2 0 643 403
1 1 2 0 640 520
1 1 2 0 640 400
1 1 2 0 637 517
1 1 2 0 637 397
1 1 2 0 634 514
1 1 2 0 634 394
1 1 2 0 631 511
1 1 2 0 631 391
1 1 2 0 628 508
1 1 2 0 628 388
1 1 2 0 625 505
1 1 2 0 625 385
1 1 2 0 622 502
1 1 2 0 622 382
1 1 2 0 619 499
1 1 2 0 619 379
1 1 2 0 616 496
1 1 2 0 616 376
1 1 2 0 613 493
1 1 2 0 613 373
1 1 2 0 610 490
1 1 2 0 610 370
1 1 2 0 607 487
1 1 2 0 607 367
1 1 2 0 604 484
1 1 2 0 604 364
1 1 2 0 601 481
1 1 2 0 601 361
1 1 2 0 598 478
1 1 2 0 598 358
1 1 2 0 595 475
1 1 2 0 595 355
1 1 2 0 592 472
1 1 2 0 592 352
1 1 2 0 589 469
1 1 2 0 589 349
1 1 2 0 586 466
1 1 2 0 586 346
1 1 2 0 583 463
1 1 2 0 583 343
1 1 2 0 580 460
1 1 2 0 580 340
1 1 2 0 577 697
1 1 2 0 577 457
1 1 2 0 574 694
1 1 2 0 574 454
1 1 2 0 571 691
1 1 2 0 571 451
1 1 2 0 568 688
1 1 2 0 568 448
1 1 2 0 565 685
1 1 2 0 565 445
1 1 2 0 562 682
1 1 2 0 562 442
1 1 2 0 559 679
1 1 2 0 559 439
1 1 2 0 556 676
1 1 2 0 556 436
1 1 2 0 553 673
1 1 2 0 553 433
1 1 2 0 550 670
1 1 2 0 550 430
1 1 2 0 547 667
1 1 2 0 547 427
1 1 2 0 544 664
1 1 2 0 544 424
1 1 2 0 541 661
1 1 2 0 541 421
1 1 2 0 538 658
1 1 2 0 538 418
1 1 2 0 535 655
1 1 2 0 535 415
1 1 2 0 532 652
1 1 2 0 532 412
1 1 2 0 529 649
1 1 2 0 529 409
1 1 2 0 526 646
1 1 2 0 526 406
1 1 2 0 523 643
1 1 2 0 523 403
1 1 2 0 520 640
1 1 2 0 520 400
1 1 2 0 517 637
1 1 2 0 517 397
1 1 2 0 514 634
1 1 2 0 514 394
1 1 2 0 511 631
1 1 2 0 511 391
1 1 2 0 508 628
1 1 2 0 508 388
1 1 2 0 505 625
1 1 2 0 505 385
1 1 2 0 502 622
1 1 2 0 502 382
1 1 2 0 499 619
1 1 2 0 499 379
1 1 2 0 496 616
1 1 2 0 496 376
1 1 2 0 493 613
1 1 2 0 493 373
1 1 2 0 490 610
1 1 2 0 490 370
1 1 2 0 487 607
1 1 2 0 487 367
1 1 2 0 484 604
1 1 2 0 484 364
1 1 2 0 481 601
1 1 2 0 481 361
1 1 2 0 478 598
1 1 2 0 478 358
1 1 2 0 475 595
1 1 2 0 475 355
1 1 2 0 472 592
1 1 2 0 472 352
1 1 2 0 469 589
1 1 2 0 469 349
1 1 2 0 466 586
1 1 2 0 466 346
1 1 2 0 463 583
1 1 2 0 463 343
1 1 2 0 460 580
1 1 2 0 460 340
1 1 2 0 457 697
1 1 2 0 457 577
1 1 2 0 454 694
1 1 2 0 454 574
1 1 2 0 451 691
1 1 2 0 451 571
1 1 2 0 448 688
1 1 2 0 448 568
1 1 2 0 445 685
1 1 2 0 445 565
1 1 2 0 442 682
1 1 2 0 442 562
1 1 2 0 439 679
1 1 2 0 439 559
1 1 2 0 436 676
1 1 2 0 436 556
1 1 2 0 433 673
1 1 2 0 433 553
1 1 2 0 430 670
1 1 2 0 430 550
1 1 2 0 427 667
1 1 2 0 427 547
1 1 2 0 424 664
1 1 2 0 424 544
1 1 2 0 421 661
1 1 2 0 421 541
1 1 2 0 418 658
1 1 2 0 418 538
1 1 2 0 415 655
1 1 2 0 415 535
1 1 2 0 412 652
1 1 2 0 412 532
1 1 2 0 409 649
1 1 2 0 409 529
1 1 2 0 406 646
1 1 2 0 406 526
1 1 2 0 403 643
1 1 2 0 403 523
1 1 2 0 400 640
1 1 2 0 400 520
1 1 2 0 397 637
1 1 2 0 397 517
1 1 2 0 394 634
1 1 2 0 394 514
1 1 2 0 391 631
1 1 2 0 391 511
1 1 2 0 388 628
1 1 2 0 388 508
1 1 2 0 385 625
1 1 2 0 385 505
1 1 2 0 382 622
1 1 2 0 382 502
1 1 2 0 379 619
1 1 2 0 379 499
1 1 2 0 376 616
1 1 2 0 376 496
1 1 2 0 373 613
1 1 2 0 373 493
1 1 2 0 370 610
1 1 2 0 370 490
1 1 2 0 367 607
1 1 2 0 367 487
1 1 2 0 364 604
1 1 2 0 364 484
1 1 2 0 361 601
1 1 2 0 361 481
1 1 2 0 358 598
1 1 2 0 358 478
1 1 2 0 355 595
1 1 2 0 355 475
1 1 2 0 352 592
1 1 2 0 352 472
1 1 2 0 349 589
1 1 2 0 349 469
1 1 2 0 346 586
1 1 2 0 346 466
1 1 2 0 343 583
1 1 2 0 343 463
1 1 2 0 340 580
1 1 2 0 340 460
1 1 2 0 697 691
1 1 2 0 697 664
1 1 2 0 697 661
1 1 2 0 697 634
1 1 2 0 697 631
1 1 2 0 697 622
1 1 2 0 697 616
1 1 2 0 697 604
1 1 2 0 694 691
1 1 2 0 694 679
1 1 2 0 694 670
1 1 2 0 694 655
1 1 2 0 694 640
1 1 2 0 694 619
1 1 2 0 694 592
1 1 2 0 694 583
1 1 2 0 691 682
1 1 2 0 691 658
1 1 2 0 691 649
1 1 2 0 691 640
1 1 2 0 691 619
1 1 2 0 691 598
1 1 2 0 688 661
1 1 2 0 688 649
1 1 2 0 688 646
1 1 2 0 688 622
1 1 2 0 688 616
1 1 2 0 688 589
1 1 2 0 685 679
1 1 2 0 685 661
1 1 2 0 685 646
1 1 2 0 685 637
1 1 2 0 685 619
1 1 2 0 685 601
1 1 2 0 685 592
1 1 2 0 682 679
1 1 2 0 682 670
1 1 2 0 682 667
1 1 2 0 682 661
1 1 2 0 682 640
1 1 2 0 682 616
1 1 2 0 682 610
1 1 2 0 679 670
1 1 2 0 679 658
1 1 2 0 679 625
1 1 2 0 679 622
1 1 2 0 679 616
1 1 2 0 679 604
1 1 2 0 679 601
1 1 2 0 676 673
1 1 2 0 676 658
1 1 2 0 676 655
1 1 2 0 676 649
1 1 2 0 676 586
1 1 2 0 676 583
1 1 2 0 673 670
1 1 2 0 673 664
1 1 2 0 673 658
1 1 2 0 673 652
1 1 2 0 673 610
1 1 2 0 673 580
1 1 2 0 670 667
1 1 2 0 670 658
1 1 2 0 670 646
1 1 2 0 670 610
1 1 2 0 670 607
1 1 2 0 670 583
1 1 2 0 670 580
1 1 2 0 667 637
1 1 2 0 667 613
1 1 2 0 667 598
1 1 2 0 667 595
1 1 2 0 664 646
1 1 2 0 664 634
1 1 2 0 664 628
1 1 2 0 664 625
1 1 2 0 664 604
1 1 2 0 664 598
1 1 2 0 664 583
1 1 2 0 661 658
1 1 2 0 661 652
1 1 2 0 661 646
1 1 2 0 661 631
1 1 2 0 661 610
1 1 2 0 661 586
1 1 2 0 661 583
1 1 2 0 658 634
1 1 2 0 658 628
1 1 2 0 658 604
1 1 2 0 658 586
1 1 2 0 658 583
1 1 2 0 655 643
1 1 2 0 655 616
1 1 2 0 655 604
1 1 2 0 655 601
1 1 2 0 652 646
1 1 2 0 652 643
1 1 2 0 652 640
1 1 2 0 652 616
1 1 2 0 652 583
1 1 2 0 649 646
1 1 2 0 649 643
1 1 2 0 649 595
1 1 2 0 649 586
1 1 2 0 646 640
1 1 2 0 646 628
1 1 2 0 646 598
1 1 2 0 646 595
1 1 2 0 646 580
1 1 2 0 643 637
1 1 2 0 643 628
1 1 2 0 643 625
1 1 2 0 643 598
1 1 2 0 640 625
1 1 2 0 640 595
1 1 2 0 640 592
1 1 2 0 637 631
1 1 2 0 637 604
1 1 2 0 637 598
1 1 2 0 634 622
1 1 2 0 634 616
1 1 2 0 634 586
1 1 2 0 631 628
1 1 2 0 631 613
1 1 2 0 631 607
1 1 2 0 631 604
1 1 2 0 631 595
1 1 2 0 631 592
1 1 2 0 628 625
1 1 2 0 628 616
1 1 2 0 628 613
1 1 2 0 628 610
1 1 2 0 628 598
1 1 2 0 625 622
1 1 2 0 625 601
1 1 2 0 625 586
1 1 2 0 622 607
1 1 2 0 622 595
1 1 2 0 622 589
1 1 2 0 622 586
1 1 2 0 622 580
1 1 2 0 619 613
1 1 2 0 619 610
1 1 2 0 616 613
1 1 2 0 616 601
1 1 2 0 616 589
1 1 2 0 613 610
1 1 2 0 613 580
1 1 2 0 610 607
1 1 2 0 610 604
1 1 2 0 610 592
1 1 2 0 604 595
1 1 2 0 601 598
1 1 2 0 601 586
1 1 2 0 598 583
1 1 2 0 595 592
1 1 2 0 595 583
1 1 2 0 592 589
1 1 2 0 589 586
1 1 2 0 589 580
1 1 2 0 586 583
1 1 2 0 577 571
1 1 2 0 577 544
1 1 2 0 577 541
1 1 2 0 577 514
1 1 2 0 577 511
1 1 2 0 577 502
1 1 2 0 577 496
1 1 2 0 577 484
1 1 2 0 574 571
1 1 2 0 574 559
1 1 2 0 574 550
1 1 2 0 574 535
1 1 2 0 574 520
1 1 2 0 574 499
1 1 2 0 574 472
1 1 2 0 574 463
1 1 2 0 571 562
1 1 2 0 571 538
1 1 2 0 571 529
1 1 2 0 571 520
1 1 2 0 571 499
1 1 2 0 571 478
1 1 2 0 568 541
1 1 2 0 568 529
1 1 2 0 568 526
1 1 2 0 568 502
1 1 2 0 568 496
1 1 2 0 568 469
1 1 2 0 565 559
1 1 2 0 565 541
1 1 2 0 565 526
1 1 2 0 565 517
1 1 2 0 565 499
1 1 2 0 565 481
1 1 2 0 565 472
1 1 2 0 562 559
1 1 2 0 562 550
1 1 2 0 562 547
1 1 2 0 562 541
1 1 2 0 562 520
1 1 2 0 562 496
1 1 2 0 562 490
1 1 2 0 559 550
1 1 2 0 559 538
1 1 2 0 559 505
1 1 2 0 559 502
1 1 2 0 559 496
1 1 2 0 559 484
1 1 2 0 559 481
1 1 2 0 556 553
1 1 2 0 556 538
1 1 2 0 556 535
1 1 2 0 556 529
1 1 2 0 556 466
1 1 2 0 556 463
1 1 2 0 553 550
1 1 2 0 553 544
1 1 2 0 553 538
1 1 2 0 553 532
1 1 2 0 553 490
1 1 2 0 553 460
1 1 2 0 550 547
1 1 2 0 550 538
1 1 2 0 550 526
1 1 2 0 550 490
1 1 2 0 550 487
1 1 2 0 550 463
1 1 2 0 550 460
1 1 2 0 547 517
1 1 2 0 547 493
1 1 2 0 547 478
1 1 2 0 547 475
1 1 2 0 544 526
1 1 2 0 544 514
1 1 2 0 544 508
1 1 2 0 544 505
1 1 2 0 544 484
1 1 2 0 544 478
1 1 2 0 544 463
1 1 2 0 541 538
1 1 2 0 541 532
1 1 2 0 541 526
1 1 2 0 541 511
1 1 2 0 541 490
1 1 2 0 541 466
1 1 2 0 541 463
1 1 2 0 538 514
1 1 2 0 538 508
1 1 2 0 538 484
1 1 2 0 538 466
1 1 2 0 538 463
1 1 2 0 535 523
1 1 2 0 535 496
1 1 2 0 535 484
1 1 2 0 535 481
1 1 2 0 532 526
1 1 2 0 532 523
1 1 2 0 532 520
1 1 2 0 532 496
1 1 2 0 532 463
1 1 2 0 529 526
1 1 2 0 529 523
1 1 2 0 529 475
1 1 2 0 529 466
1 1 2 0 526 520
1 1 2 0 526 508
1 1 2 0 526 478
1 1 2 0 526 475
1 1 2 0 526 460
1 1 2 0 523 517
1 1 2 0 523 508
1 1 2 0 523 505
1 1 2 0 523 478
1 1 2 0 520 505
1 1 2 0 520 475
1 1 2 0 520 472
1 1 2 0 517 511
1 1 2 0 517 484
1 1 2 0 517 478
1 1 2 0 514 502
1 1 2 0 514 496
1 1 2 0 514 466
1 1 2 0 511 508
1 1 2 0 511 493
1 1 2 0 511 487
1 1 2 0 511 484
1 1 2 0 511 475
1 1 2 0 511 472
1 1 2 0 508 505
1 1 2 0 508 496
1 1 2 0 508 493
1 1 2 0 508 490
1 1 2 0 508 478
1 1 2 0 505 502
1 1 2 0 505 481
1 1 2 0 505 466
1 1 2 0 502 487
1 1 2 0 502 475
1 1 2 0 502 469
1 1 2 0 502 466
1 1 2 0 502 460
1 1 2 0 499 493
1 1 2 0 499 490
1 1 2 0 496 493
1 1 2 0 496 481
1 1 2 0 496 469
1 1 2 0 493 490
1 1 2 0 493 460
1 1 2 0 490 487
1 1 2 0 490 484
1 1 2 0 490 472
1 1 2 0 484 475
1 1 2 0 481 478
1 1 2 0 481 466
1 1 2 0 478 463
1 1 2 0 475 472
1 1 2 0 475 463
1 1 2 0 472 469
1 1 2 0 469 466
1 1 2 0 469 460
1 1 2 0 466 463
1 1 2 0 457 451
1 1 2 0 457 424
1 1 2 0 457 421
1 1 2 0 457 394
1 1 2 0 457 391
1 1 2 0 457 382
1 1 2 0 457 376
1 1 2 0 457 364
1 1 2 0 454 451
1 1 2 0 454 439
1 1 2 0 454 430
1 1 2 0 454 415
1 1 2 0 454 400
1 1 2 0 454 379
1 1 2 0 454 352
1 1 2 0 454 343
1 1 2 0 451 442
1 1 2 0 451 418
1 1 2 0 451 409
1 1 2 0 451 400
1 1 2 0 451 379
1 1 2 0 451 358
1 1 2 0 448 421
1 1 2 0 448 409
1 1 2 0 448 406
1 1 2 0 448 382
1 1 2 0 448 376
1 1 2 0 448 349
1 1 2 0 445 439
1 1 2 0 445 421
1 1 2 0 445 406
1 1 2 0 445 397
1 1 2 0 445 379
1 1 2 0 445 361
1 1 2 0 445 352
1 1 2 0 442 439
1 1 2 0 442 430
1 1 2 0 442 427
1 1 2 0 442 421
1 1 2 0 442 400
1 1 2 0 442 376
1 1 2 0 442 370
1 1 2 0 439 430
1 1 2 0 439 418
1 1 2 0 439 385
1 1 2 0 439 382
1 1 2 0 439 376
1 1 2 0 439 364
1 1 2 0 439 361
1 1 2 0 436 433
1 1 2 0 436 418
1 1 2 0 436 415
1 1 2 0 436 409
1 1 2 0 436 346
1 1 2 0 436 343
1 1 2 0 433 430
1 1 2 0 433 424
1 1 2 0 433 418
1 1 2 0 433 412
1 1 2 0 433 370
1 1 2 0 433 340
1 1 2 0 430 427
1 1 2 0 430 418
1 1 2 0 430 406
1 1 2 0 430 370
1 1 2 0 430 367
1 1 2 0 430 343
1 1 2 0 430 340
1 1 2 0 427 397
1 1 2 0 427 373
1 1 2 0 427 358
1 1 2 0 427 355
1 1 2 0 424 406
1 1 2 0 424 394
1 1 2 0 424 388
1 1 2 0 424 385
1 1 2 0 424 364
1 1 2 0 424 358
1 1 2 0 424 343
1 1 2 0 421 418
1 1 2 0 421 412
1 1 2 0 421 406
1 1 2 0 421 391
1 1 2 0 421 370
1 1 2 0 421 346
1 1 2 0 421 343
1 1 2 0 418 394
1 1 2 0 418 388
1 1 2 0 418 364
1 1 2 0 418 346
1 1 2 0 418 343
1 1 2 0 415 403
1 1 2 0 415 376
1 1 2 0 415 364
1 1 2 0 415 361
1 1 2 0 412 406
1 1 2 0 412 403
1 1 2 0 412 400
1 1 2 0 412 376
1 1 2 0 412 343
1 1 2 0 409 406
1 1 2 0 409 403
1 1 2 0 409 355
1 1 2 0 409 346
1 1 2 0 406 400
1 1 2 0 406 388
1 1 2 0 406 358
1 1 2 0 406 355
1 1 2 0 406 340
1 1 2 0 403 397
1 1 2 0 403 388
1 1 2 0 403 385
1 1 2 0 403 358
1 1 2 0 400 385
1 1 2 0 400 355
1 1 2 0 400 352
1 1 2 0 397 391
1 1 2 0 397 364
1 1 2 0 397 358
1 1 2 0 394 382
1 1 2 0 394 376
1 1 2 0 394 346
1 1 2 0 391 388
1 1 2 0 391 373
1 1 2 0 391 367
1 1 2 0 391 364
1 1 2 0 391 355
1 1 2 0 391 352
1 1 2 0 388 385
1 1 2 0 388 376
1 1 2 0 388 373
1 1 2 0 388 370
1 1 2 0 388 358
1 1 2 0 385 382
1 1 2 0 385 361
1 1 2 0 385 346
1 1 2 0 382 367
1 1 2 0 382 355
1 1 2 0 382 349
1 1 2 0 382 346
1 1 2 0 382 340
1 1 2 0 379 373
1 1 2 0 379 370
1 1 2 0 376 373
1 1 2 0 376 361
1 1 2 0 376 349
1 1 2 0 373 370
1 1 2 0 373 340
1 1 2 0 370 367
1 1 2 0 370 364
1 1 2 0 370 352
1 1 2 0 364 355
1 1 2 0 361 358
1 1 2 0 361 346
1 1 2 0 358 343
1 1 2 0 355 352
1 1 2 0 355 343
1 1 2 0 352 349
1 1 2 0 349 346
1 1 2 0 349 340
1 1 2 0 346 343
0
340 col(39,3)
343 col(38,3)
346 col(37,3)
349 col(36,3)
352 col(35,3)
355 col(34,3)
358 col(33,3)
361 col(32,3)
364 col(31,3)
367 col(30,3)
370 col(29,3)
373 col(28,3)
376 col(27,3)
379 col(26,3)
382 col(25,3)
385 col(24,3)
388 col(23,3)
391 col(22,3)
394 col(21,3)
397 col(20,3)
400 col(19,3)
403 col(18,3)
406 col(17,3)
409 col(16,3)
412 col(15,3)
415 col(14,3)
418 col(13,3)
421 col(12,3)
424 col(11,3)
427 col(10,3)
430 col(9,3)
433 col(8,3)
436 col(7,3)
439 col(6,3)
442 col(5,3)
445 col(4,3)
448 col(3,3)
451 col(2,3)
454 col(1,3)
457 col(0,3)
460 col(39,2)
463 col(38,2)
466 col(37,2)
469 col(36,2)
472 col(35,2)
475 col(34,2)
478 col(33,2)
481 col(32,2)
484 col(31,2)
487 col(30,2)
490 col(29,2)
493 col(28,2)
496 col(27,2)
499 col(26,2)
502 col(25,2)
505 col(24,2)
508 col(23,2)
511 col(22,2)
514 col(21,2)
517 col(20,2)
520 col(19,2)
523 col(18,2)
526 col(17,2)
529 col(16,2)
532 col(15,2)
535 col(14,2)
538 col(13,2)
541 col(12,2)
544 col(11,2)
547 col(10,2)
550 col(9,2)
553 col(8,2)
556 col(7,2)
559 col(6,2)
562 col(5,2)
565 col(4,2)
568 col(3,2)
571 col(2,2)
574 col(1,2)
577 col(0,2)
580 col(39,1)
583 col(38,1)
586 col(37,1)
589 col(36,1)
592 col(35,1)
595 col(34,1)
598 col(33,1)
601 col(32,1)
604 col(31,1)
607 col(30,1)
610 col(29,1)
613 col(28,1)
616 col(27,1)
619 col(26,1)
622 col(25,1)
625 col(24,1)
628 col(23,1)
631 col(22,1)
634 col(21,1)
637 col(20,1)
640 col(19,1)
643 col(18,1)
646 col(17,1)
649 col(16,1)
652 col(15,1)
655 col(14,1)
658 col(13,1)
661 col(12,1)
664 col(11,1)
667 col(10,1)
670 col(9,1)
673 col(8,1)
676 col(7,1)
679 col(6,1)
682 col(5,1)
685 col(4,1)
688 col(3,1)
691 col(2,1)
694 col(1,1)
697 col(0,1)
0
B+
0
B-
1
0
1
"""
output = """
"""
| [
"carminedodaro@gmail.com"
] | carminedodaro@gmail.com |
ce3dcdb6d1d8778e7ef14a5fadca90b5f9c643d5 | 27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f | /plugins/cliconf/dellos9.py | ea36ef5694e8fa93800086746101687f43395068 | [] | no_license | coll-test/notstdlib.moveitallout | eb33a560070bbded5032385d0aea2f3cf60e690b | 0987f099b783c6cf977db9233e1c3d9efcbcb3c7 | refs/heads/master | 2020-12-19T22:28:33.369557 | 2020-01-23T18:51:26 | 2020-01-23T18:51:26 | 235,865,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,293 | py | #
# (c) 2017 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
---
cliconf: dellos9
short_description: Use dellos9 cliconf to run command on Dell OS9 platform
description:
- This dellos9 plugin provides low level abstraction apis for
sending and receiving CLI commands from Dell OS9 network devices.
'''
import re
import json
from itertools import chain
from ansible.errors import AnsibleConnectionFailure
from ansible_collections.notstdlib.moveitallout.plugins.module_utils._text import to_bytes, to_text
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.common._collections_compat import Mapping
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
class Cliconf(CliconfBase):
def get_device_info(self):
device_info = {}
device_info['network_os'] = 'dellos9'
reply = self.get('show version')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'Software Version (\S+)', data)
if match:
device_info['network_os_version'] = match.group(1)
match = re.search(r'System Type (\S+)', data, re.M)
if match:
device_info['network_os_model'] = match.group(1)
reply = self.get('show running-config | grep hostname')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'^hostname (.+)', data, re.M)
if match:
device_info['network_os_hostname'] = match.group(1)
return device_info
@enable_mode
def get_config(self, source='running', format='text', flags=None):
if source not in ('running', 'startup'):
return self.invalid_params("fetching configuration from %s is not supported" % source)
# if source == 'running':
# cmd = 'show running-config all'
else:
cmd = 'show startup-config'
return self.send_command(cmd)
@enable_mode
def edit_config(self, command):
for cmd in chain(['configure terminal'], to_list(command), ['end']):
self.send_command(cmd)
def get(self, command, prompt=None, answer=None, sendonly=False, newline=True, check_all=False):
return self.send_command(command=command, prompt=prompt, answer=answer, sendonly=sendonly, newline=newline, check_all=check_all)
def get_capabilities(self):
result = super(Cliconf, self).get_capabilities()
return json.dumps(result)
def run_commands(self, commands=None, check_rc=True):
if commands is None:
raise ValueError("'commands' value is required")
responses = list()
for cmd in to_list(commands):
if not isinstance(cmd, Mapping):
cmd = {'command': cmd}
output = cmd.pop('output', None)
if output:
raise ValueError("'output' value %s is not supported for run_commands" % output)
try:
out = self.send_command(**cmd)
except AnsibleConnectionFailure as e:
if check_rc:
raise
out = getattr(e, 'err', to_text(e))
responses.append(out)
return responses
def set_cli_prompt_context(self):
"""
Make sure we are in the operational cli mode
:return: None
"""
if self._connection.connected:
self._update_cli_prompt_context(config_context=')#')
| [
"wk@sydorenko.org.ua"
] | wk@sydorenko.org.ua |
a4c54b9968e9bed0406c86ea81191b639e5089fa | e14605612c96d450bea1fca7fa9963105b6452fb | /tensorflow/python/kernel_tests/constant_op_test.py | 92f9b5fe4a4b80993d8afb3606e0df80a3cdb628 | [
"Apache-2.0"
] | permissive | Yangqing/tensorflow | 0bb9259398eac98dc8e9f48cc0b7506f4d5f8a24 | 18792c1fce7e12d36c0f1704cff15ed820cc6ff5 | refs/heads/master | 2023-06-20T21:11:52.483377 | 2015-11-11T21:16:55 | 2015-11-11T21:16:55 | 45,876,905 | 2 | 2 | null | 2015-11-11T21:16:55 | 2015-11-10T00:38:20 | C++ | UTF-8 | Python | false | false | 18,692 | py | """Tests for ConstantOp."""
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import gen_array_ops
class ConstantTest(tf.test.TestCase):
def _testCpu(self, x):
np_ans = np.array(x)
with self.test_session(use_gpu=False):
tf_ans = tf.convert_to_tensor(x).eval()
if np_ans.dtype in [np.float32, np.float64, np.complex64]:
self.assertAllClose(np_ans, tf_ans)
else:
self.assertAllEqual(np_ans, tf_ans)
def _testGpu(self, x):
np_ans = np.array(x)
with self.test_session(use_gpu=True):
tf_ans = tf.convert_to_tensor(x).eval()
if np_ans.dtype in [np.float32, np.float64, np.complex64]:
self.assertAllClose(np_ans, tf_ans)
else:
self.assertAllEqual(np_ans, tf_ans)
def _testAll(self, x):
self._testCpu(x)
self._testGpu(x)
def testFloat(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
self._testAll(
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float32))
self._testAll(np.empty((2, 0, 5)).astype(np.float32))
def testDouble(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float64))
self._testAll(
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float64))
self._testAll(np.empty((2, 0, 5)).astype(np.float64))
def testInt32(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int32))
self._testAll(
(100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(np.int32))
self._testAll(np.empty((2, 0, 5)).astype(np.int32))
def testInt64(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int64))
self._testAll(
(100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(np.int64))
self._testAll(np.empty((2, 0, 5)).astype(np.int64))
def testSComplex(self):
self._testAll(
np.complex(1, 2) * np.arange(-15, 15).reshape([2, 3, 5]).astype(
np.complex64))
self._testAll(np.complex(
1, 2) * np.random.normal(size=30).reshape([2, 3, 5]).astype(
np.complex64))
self._testAll(np.empty((2, 0, 5)).astype(np.complex64))
def testString(self):
self._testCpu(np.array([str(x) for x in np.arange(-15, 15)]).reshape(
[2, 3, 5]))
self._testCpu(np.empty((2, 0, 5)).astype(np.str_))
def testStringWithNulls(self):
with self.test_session():
val = tf.convert_to_tensor("\0\0\0\0").eval()
self.assertEqual(len(val), 4)
self.assertEqual(val, "\0\0\0\0")
with self.test_session():
val = tf.convert_to_tensor("xx\0xx").eval()
self.assertEqual(len(val), 5)
self.assertAllEqual(val, "xx\0xx")
nested = [["\0\0\0\0", "xx\0xx"], ["\0_\0_\0_\0", "\0"]]
with self.test_session():
val = tf.convert_to_tensor(nested).eval()
# NOTE(mrry): Do not use assertAllEqual, because it converts nested to a
# numpy array, which loses the null terminators.
self.assertEqual(val.tolist(), nested)
def testExplicitShapeNumPy(self):
with tf.Graph().as_default():
c = tf.constant(
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32),
shape=[2, 3, 5])
self.assertEqual(c.get_shape(), [2, 3, 5])
def testImplicitShapeNumPy(self):
with tf.Graph().as_default():
c = tf.constant(
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
self.assertEqual(c.get_shape(), [2, 3, 5])
def testExplicitShapeList(self):
with tf.Graph().as_default():
c = tf.constant([1, 2, 3, 4, 5, 6, 7], shape=[7])
self.assertEqual(c.get_shape(), [7])
def testImplicitShapeList(self):
with tf.Graph().as_default():
c = tf.constant([1, 2, 3, 4, 5, 6, 7])
self.assertEqual(c.get_shape(), [7])
def testExplicitShapeNumber(self):
with tf.Graph().as_default():
c = tf.constant(1, shape=[1])
self.assertEqual(c.get_shape(), [1])
def testImplicitShapeNumber(self):
with tf.Graph().as_default():
c = tf.constant(1)
self.assertEqual(c.get_shape(), [])
def testShapeInconsistent(self):
with tf.Graph().as_default():
c = tf.constant([1, 2, 3, 4, 5, 6, 7], shape=[10])
self.assertEqual(c.get_shape(), [10])
# pylint: disable=g-long-lambda
def testShapeWrong(self):
with tf.Graph().as_default():
with self.assertRaisesWithPredicateMatch(
ValueError,
lambda e: ("Too many elements provided. Needed at most 5, "
"but received 7" == str(e))):
tf.constant([1, 2, 3, 4, 5, 6, 7], shape=[5])
# pylint: enable=g-long-lambda
def testTooLargeConstant(self):
with tf.Graph().as_default():
large_array = np.zeros((512, 1024, 1024), dtype=np.float32)
with self.assertRaisesRegexp(
ValueError,
"Cannot create an Operation with a NodeDef larger than 2GB."):
c = tf.constant(large_array)
def testTooLargeGraph(self):
with tf.Graph().as_default() as g:
large_array = np.zeros((256, 1024, 1024), dtype=np.float32)
c = tf.constant(large_array)
d = tf.constant(large_array)
with self.assertRaisesRegexp(
ValueError, "GraphDef cannot be larger than 2GB."):
g.as_graph_def()
def testSparseValuesRaiseErrors(self):
with self.assertRaisesRegexp(ValueError,
"setting an array element with a sequence"):
c = tf.constant([[1, 2], [3]], dtype=tf.int32)
with self.assertRaisesRegexp(ValueError, "must be a dense"):
c = tf.constant([[1, 2], [3]])
with self.assertRaisesRegexp(ValueError, "must be a dense"):
c = tf.constant([[1, 2], [3], [4, 5]])
class AsTensorTest(tf.test.TestCase):
def testAsTensorForTensorInput(self):
with tf.Graph().as_default():
t = tf.constant(10.0)
x = tf.convert_to_tensor(t)
self.assertIs(t, x)
def testAsTensorForNonTensorInput(self):
with tf.Graph().as_default():
x = tf.convert_to_tensor(10.0)
self.assertTrue(isinstance(x, tf.Tensor))
def testAsTensorForShapeInput(self):
with self.test_session():
x = tf.convert_to_tensor(tf.TensorShape([]))
self.assertEqual(tf.int32, x.dtype)
self.assertAllEqual([], x.eval())
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3]))
self.assertEqual(tf.int32, x.dtype)
self.assertAllEqual([1, 2, 3], x.eval())
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3]), dtype=tf.int64)
self.assertEqual(tf.int64, x.dtype)
self.assertAllEqual([1, 2, 3], x.eval())
x = tf.reshape(tf.zeros([6]), tf.TensorShape([2, 3]))
self.assertAllEqual([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], x.eval())
with self.assertRaisesRegexp(ValueError, "partially known"):
tf.convert_to_tensor(tf.TensorShape(None))
with self.assertRaisesRegexp(ValueError, "partially known"):
tf.convert_to_tensor(tf.TensorShape([1, None, 64]))
with self.assertRaises(TypeError):
tf.convert_to_tensor(tf.TensorShape([1, 2, 3]), dtype=tf.float32)
def testAsTensorForDimensionInput(self):
with self.test_session():
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3])[1])
self.assertEqual(tf.int32, x.dtype)
self.assertAllEqual(2, x.eval())
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3])[1], dtype=tf.int64)
self.assertEqual(tf.int64, x.dtype)
self.assertAllEqual(2, x.eval())
with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
tf.convert_to_tensor(tf.TensorShape(None)[1])
with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
tf.convert_to_tensor(tf.TensorShape([1, None, 64])[1])
with self.assertRaises(TypeError):
tf.convert_to_tensor(tf.TensorShape([1, 2, 3])[1], dtype=tf.float32)
class IdentityOpTest(tf.test.TestCase):
def testIdTensor(self):
with tf.Graph().as_default():
x = tf.constant(2.0, shape=[6], name="input")
id_op = tf.identity(x, name="id")
self.assertTrue(isinstance(id_op.op.inputs[0], tf.Tensor))
self.assertProtoEquals(
"name: 'id' op: 'Identity' input: 'input' "
"attr { key: 'T' value { type: DT_FLOAT } }", id_op.op.node_def)
class ZerosTest(tf.test.TestCase):
def _Zeros(self, shape):
with self.test_session():
ret = tf.zeros(shape)
self.assertEqual(shape, ret.get_shape())
return ret.eval()
def testConst(self):
self.assertTrue(np.array_equal(self._Zeros([2, 3]), np.array([[0] * 3] *
2)))
def testDynamicSizes(self):
np_ans = np.array([[0] * 3] * 2)
with self.test_session():
# Creates a tensor of 2 x 3.
d = tf.fill([2, 3], 12., name="fill")
# Constructs a tensor of zeros of the same dimensions as "d".
z = tf.zeros(tf.shape(d))
out = z.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, d)
self.assertShapeEqual(np_ans, z)
def testDtype(self):
with self.test_session():
d = tf.fill([2, 3], 12., name="fill")
self.assertEqual(d.get_shape(), [2, 3])
# Test default type for both constant size and dynamic size
z = tf.zeros([2, 3])
self.assertEquals(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
z = tf.zeros(tf.shape(d))
self.assertEquals(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
# Test explicit type control
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.int64]:
z = tf.zeros([2, 3], dtype=dtype)
self.assertEquals(z.dtype, dtype)
self.assertEquals([2, 3], z.get_shape())
z = tf.zeros(tf.shape(d), dtype=dtype)
self.assertEquals(z.dtype, dtype)
self.assertEquals([2, 3], z.get_shape())
class ZerosLikeTest(tf.test.TestCase):
def testZerosLike(self):
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.int64]:
numpy_dtype = dtype.as_numpy_dtype
with self.test_session():
# Creates a tensor of non-zero values with shape 2 x 3.
d = tf.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = tf.zeros_like(d)
# Test that the type is correct
self.assertEquals(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[0] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
def testGenZerosLike(self):
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.int64]:
numpy_dtype = dtype.as_numpy_dtype
with self.test_session():
# Creates a tensor of non-zero values with shape 2 x 3.
d = tf.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = gen_array_ops._zeros_like(d)
# Test that the type is correct
self.assertEquals(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[0] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
class OnesTest(tf.test.TestCase):
def _Ones(self, shape):
with self.test_session():
ret = tf.ones(shape)
self.assertEqual(shape, ret.get_shape())
return ret.eval()
def testConst(self):
self.assertTrue(np.array_equal(self._Ones([2, 3]), np.array([[1] * 3] * 2)))
def testDynamicSizes(self):
np_ans = np.array([[1] * 3] * 2)
with self.test_session():
# Creates a tensor of 2 x 3.
d = tf.fill([2, 3], 12., name="fill")
# Constructs a tensor of ones of the same dimensions as "d".
z = tf.ones(tf.shape(d))
out = z.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, d)
self.assertShapeEqual(np_ans, z)
def testDtype(self):
with self.test_session():
d = tf.fill([2, 3], 12., name="fill")
self.assertEqual(d.get_shape(), [2, 3])
# Test default type for both constant size and dynamic size
z = tf.ones([2, 3])
self.assertEquals(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
z = tf.ones(tf.shape(d))
self.assertEquals(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
# Test explicit type control
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.int64]:
z = tf.ones([2, 3], dtype=dtype)
self.assertEquals(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
z = tf.ones(tf.shape(d), dtype=dtype)
self.assertEquals(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
class OnesLikeTest(tf.test.TestCase):
def testOnesLike(self):
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.int64]:
numpy_dtype = dtype.as_numpy_dtype
with self.test_session():
# Creates a tensor of non-zero values with shape 2 x 3.
d = tf.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = tf.ones_like(d)
# Test that the type is correct
self.assertEquals(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[1] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
def testGenOnesLike(self):
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.int64]:
numpy_dtype = dtype.as_numpy_dtype
with self.test_session():
# Creates a tensor of non-zero values with shape 2 x 3.
d = tf.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = tf.ones_like(d)
# Test that the type is correct
self.assertEquals(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[1] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
class FillTest(tf.test.TestCase):
def _compare(self, dims, val, np_ans, use_gpu):
with self.test_session(use_gpu=use_gpu):
tf_ans = tf.fill(dims, val, name="fill")
out = tf_ans.eval()
self.assertAllClose(np_ans, out)
# Fill does not set the shape.
# self.assertShapeEqual(np_ans, tf_ans)
def _compareAll(self, dims, val, np_ans):
self._compare(dims, val, np_ans, False)
self._compare(dims, val, np_ans, True)
def testFillFloat(self):
np_ans = np.array([[3.1415] * 3] * 2).astype(np.float32)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillDouble(self):
np_ans = np.array([[3.1415] * 3] * 2).astype(np.float64)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillInt32(self):
np_ans = np.array([[42] * 3] * 2).astype(np.int32)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillInt64(self):
np_ans = np.array([[-42] * 3] * 2).astype(np.int64)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillComplex(self):
np_ans = np.array([[0.15] * 3] * 2).astype(np.complex64)
self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)
def testFillString(self):
np_ans = np.array([["yolo"] * 3] * 2)
with self.test_session(use_gpu=False):
tf_ans = tf.fill([2, 3], np_ans[0][0], name="fill").eval()
self.assertAllEqual(np_ans, tf_ans)
def testShapeFunctionEdgeCases(self):
# Non-vector dimensions.
with self.assertRaises(ValueError):
tf.fill([[0, 1], [2, 3]], 1.0)
# Non-scalar value.
with self.assertRaises(ValueError):
tf.fill([3, 2], [1.0, 2.0])
# Partial dimension information.
f = tf.fill(
tf.placeholder(tf.int32, shape=(4,)), 3.0)
self.assertEqual([None, None, None, None], f.get_shape().as_list())
class PlaceholderTest(tf.test.TestCase):
def testDtype(self):
with self.test_session():
p = tf.placeholder(tf.float32, name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(p_identity.eval(feed_dict={p: feed_array}),
feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float"):
p_identity.eval()
def testShape(self):
with self.test_session():
p = tf.placeholder(tf.float32, shape=(10, 10), name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(p_identity.eval(feed_dict={p: feed_array}),
feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float and "
"shape dim { size: 10 } dim { size: 10 }"):
p_identity.eval()
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in e.message):
p_identity.eval(feed_dict={p: feed_array[:5, :5]})
def testPartialShape(self):
with self.test_session():
p = tf.placeholder(tf.float32, shape=[None, 3], name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 3)
self.assertAllClose(p_identity.eval(feed_dict={p: feed_array}),
feed_array)
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in e.message):
p_identity.eval(feed_dict={p: feed_array[:5, :2]})
def testControlDependency(self):
with self.test_session():
p = tf.placeholder(tf.int32, shape=[], name="p")
with tf.control_dependencies([p]):
c = tf.constant(5, tf.int32)
d = tf.mul(p, c)
self.assertEqual(10, d.eval(feed_dict={p: 2}))
def testFillNegative(self):
with self.test_session():
for shape in (-1,), (2, -1), (-1, 2):
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
" must be nonnegative"):
tf.fill(shape, 7).eval()
if __name__ == "__main__":
tf.test.main()
| [
"keveman@gmail.com"
] | keveman@gmail.com |
82324002112c69c793b3a471952ab9a99b8d73c8 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/timmahrt_pyAcoustics/pyAcoustics-master/pyacoustics/speech_rate/dictionary_estimate.py | 3f9a3d8036bf21e8df109bb47e85b80b53cafc9f | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,916 | py | '''
Created on Jan 28, 2015
@author: tmahrt
'''
import os
from os.path import join
from pyacoustics.utilities import utils
from pysle import isletool
def percentInside(startTime, endTime, cmprStartTime, cmprEndTime):
if (float(startTime) <= float(cmprEndTime) and
float(endTime) >= float(cmprStartTime)):
leftEdge = cmprStartTime - startTime
rightEdge = endTime - cmprEndTime
if leftEdge < 0:
leftEdge = 0
if rightEdge < 0:
rightEdge = 0
retVal = 1 - ((rightEdge + leftEdge)) / (endTime - startTime)
# No overlap
else:
retVal = 0
return retVal
def manualPhoneCount(tgInfoPath, isleFN, outputPath, skipList=None):
if skipList is None:
skipList = []
utils.makeDir(outputPath)
isleDict = isletool.LexicalTool(isleFN)
existFNList = utils.findFiles(outputPath, filterPaths=".txt")
for fn in utils.findFiles(tgInfoPath, filterExt=".txt",
skipIfNameInList=existFNList):
if os.path.exists(join(outputPath, fn)):
continue
print(fn)
dataList = utils.openCSV(tgInfoPath, fn)
dataList = [row[2] for row in dataList] # start, stop, tmpLabel
outputList = []
for tmpLabel in dataList:
if tmpLabel not in skipList:
syllableCount, phoneCount = isletool.getNumPhones(isleDict,
tmpLabel,
maxFlag=True)
else:
syllableCount, phoneCount = 0, 0
outputList.append("%d,%d" % (syllableCount, phoneCount))
outputTxt = "\n".join(outputList)
with open(join(outputPath, fn), "w") as fd:
fd.write(outputTxt)
def manualPhoneCountForEpochs(manualCountsPath, tgInfoPath, epochPath,
outputPath):
utils.makeDir(outputPath)
skipList = utils.findFiles(outputPath, filterExt=".txt")
for fn in utils.findFiles(tgInfoPath, filterExt=".txt",
skipIfNameInList=skipList):
epochList = utils.openCSV(epochPath, fn)
tgInfo = utils.openCSV(tgInfoPath, fn)
manualCounts = utils.openCSV(manualCountsPath, fn)
epochOutputList = []
for epochTuple in epochList: # Epoch num, start, stop
epochStart, epochStop = float(epochTuple[1]), float(epochTuple[2])
# Find all of the intervals that are at least partially
# contained within the current epoch
epochSyllableCount = 0
epochPhoneCount = 0
speechDuration = 0
for info, counts in utils.safeZip([tgInfo, manualCounts],
enforceLength=True):
start, stop = float(info[0]), float(info[1])
syllableCount, phoneCount = float(counts[0]), float(counts[1])
# Accounts for intervals that straddle an epoch boundary
multiplicationFactor = percentInside(start, stop,
epochStart, epochStop)
speechDuration += (stop - start) * multiplicationFactor
epochSyllableCount += syllableCount * multiplicationFactor
epochPhoneCount += phoneCount * multiplicationFactor
epochOutputList.append("%f,%f,%f" % (epochSyllableCount,
epochPhoneCount,
speechDuration))
with open(join(outputPath, fn), "w") as fd:
fd.write("\n".join(epochOutputList))
| [
"659338505@qq.com"
] | 659338505@qq.com |
b9d0e1993cebf34108fd3ab39478878cf896c0c1 | e85812f88a18b08a6e5ccff09c8e461efcc4c715 | /manage.py | da944a91ff841f8cc4d99639c093cf8d5d88dbee | [] | no_license | tjiang123456/guestt | 1c84479d79927fd5149a36d75144a28461b66ed2 | 7bdb49a6a704eccaef31e86a0a82ee84097f69fd | refs/heads/master | 2020-03-19T07:27:11.923424 | 2018-06-05T03:28:49 | 2018-06-05T03:28:49 | 136,114,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "guestt.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"email@example.com"
] | email@example.com |
64d3e39bc45f23a960c5d92793d0f167df8476e7 | b366806c99ac30e77789f80417978902e25628da | /boto3_exceptions/workmail.py | ef7317ccd08714c962e809adfcdbf095c1236e52 | [
"MIT"
] | permissive | siteshen/boto3_exceptions | 9027b38c238030859572afec7f96323171596eb7 | d6174c2577c9d4b17a09a89cd0e4bd1fe555b26b | refs/heads/master | 2020-04-19T03:15:02.525468 | 2019-10-23T07:37:36 | 2019-10-23T07:37:36 | 167,928,540 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,172 | py | import boto3
exceptions = boto3.client('workmail').exceptions
DirectoryServiceAuthenticationFailedException = exceptions.DirectoryServiceAuthenticationFailedException
DirectoryUnavailableException = exceptions.DirectoryUnavailableException
EmailAddressInUseException = exceptions.EmailAddressInUseException
EntityAlreadyRegisteredException = exceptions.EntityAlreadyRegisteredException
EntityNotFoundException = exceptions.EntityNotFoundException
EntityStateException = exceptions.EntityStateException
InvalidConfigurationException = exceptions.InvalidConfigurationException
InvalidParameterException = exceptions.InvalidParameterException
InvalidPasswordException = exceptions.InvalidPasswordException
MailDomainNotFoundException = exceptions.MailDomainNotFoundException
MailDomainStateException = exceptions.MailDomainStateException
NameAvailabilityException = exceptions.NameAvailabilityException
OrganizationNotFoundException = exceptions.OrganizationNotFoundException
OrganizationStateException = exceptions.OrganizationStateException
ReservedNameException = exceptions.ReservedNameException
UnsupportedOperationException = exceptions.UnsupportedOperationException
| [
"xiaojiang@actwill.com.cn"
] | xiaojiang@actwill.com.cn |
834a9ba3a9d2cf2da9d0053574fae4a3005e9118 | 987390ca6481ec5aa2b9e0e0e849203b6c22ce62 | /zkeco-core/adms/bak/iaccess/dev_comm_center.py | 0fd4ccad1a7fbc008065ce568f574fefa1cc4164 | [] | no_license | alungboy/johan-doc | 81b2363e7f2ad189d0623007eea66233a2e18f1c | 7ced14577405caf6127df03007619fe9cfda3847 | refs/heads/master | 2020-04-03T18:01:08.531971 | 2013-08-13T04:26:42 | 2013-08-13T04:26:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78,796 | py | # -*- coding: utf-8 -*-
#! /usr/bin/env python
#
#设备通讯进程池
#
# Changelog :
#
# 2010.3.19 Zhang Honggen
# create at zk park Dongguan
from multiprocessing import Pool, Process, Manager#pool tcp/ip---proess 485
import threading
import time
import datetime
from time import sleep, ctime
from django.utils.translation import ugettext_lazy as _
from django.utils import simplejson
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.encoding import smart_str
from django.db import models, connection
import os, re
import redis
import dict4ini
from redis.server import queqe_server
from mysite.iaccess.devcomm import TDevComm
#from mysite.iaccess.video import TDevVideo
from mysite.iaccess.devcomm import *
from traceback import print_exc
from mysite.utils import printf, deletelog
from ctypes import *
from django.contrib.auth.decorators import login_required
from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL, COMMU_MODE_PULL_RS485, COMMU_MODE_PULL_TCPIP, DEVICE_VIDEO_SERVER
from mysite.personnel.models.model_emp import format_pin
from dbapp.datautils import filterdata_by_user
try:
import cPickle as pickle
except:
import pickle
MAX_TRY_COMM_TIME = 5
MAX_CONNECT_COUNT = 60*24*30 #重连一个月失败后禁用
MAX_INTERVAL_CONNTECT_TIME = 60
PAUSE_TIMEOUT = 60 #485暂停超时60秒
g_devcenter=None
g_video_server={}
G_DEVICE_CONNECT = "CONNECT"
G_DEVICE_DISCONNECT = "DISCONNECT"
G_DEVICE_UPDATE_DATA = "DATA UPDATE"
G_DEVICE_QUERY_DATA = "DATA QUERY"
G_DEVICE_DELETE_DATA = "DATA DELETE"
G_DEVICE_GET_DATA="DEVICE GET"
G_DEVICE_SET_DATA="DEVICE SET"
G_DEVICE_CANCEL_ALARM = "CANCEL ALARM"
G_DEVICE_CONTROL_NO = "CONTROL NO"
G_DEVICE_UPGRADE_FIRMWARE = "UPGRADE FIRMWARE"
G_DEVICE_GET_OPTION = "OPTION GET"
G_DEVICE_SET_OPTION = "OPTION SET"
G_REAL_LOG = "REAL_LOG"
G_DOWN_NEWLOG = "DOWN_NEWLOG"
G_QUEUE_ERROR = "QUEUE_ERROR"
G_CHECK_SERVICE = "CHECK_SERVICE"
GR_RETURN_OK = 200
FORMAT_DATE = "%Y-%m-%d %H:%M:%S"
ALAEM_ID_START = 100#20
ALAEM_ID_END = 200
DOOR_STATE_ID = 255
EVENT_DOORSENSOROPEN = 200 #门磁开
EVENT_DOORSENSORCLOSE = 201 #门磁关
EVENT_LINKCONTROL = 6 #联动事件
EVENT_UNREGISTERCARD = 27 #卡未注册
INOUT_SEVER = 220
INOUT_SHORT = 221
MAX_RTLOG = 5000 #实时事件最大缓存
DEVOPT="DEV_OPERATE" #设备操作缓存, 新增、修改、删除设备操作
#CENTER_PROCE_HEART="CENTER_HEART_%s"
CENTER_PROCE_LIST="CENTER_PROCE_LIST"
CENTER_MAIN_PID="CENTER_MAIN_PID"
OPERAT_ADD =1
OPERAT_EDIT =2
OPERAT_DEL =3
PROCESS_NORMAL = 0
PROCESS_WAIT_PAUSE = 1
PROCESS_PAUSE = 2
DEVICE_COMMAND_TABLE = [
_(u'用户信息'),
_(u'门禁权限信息'),
_(u'假日设置'),
_(u'时间段设置'),
_(u'首卡常开设置'),
_(u'多卡开门设置'),
_(u'事件记录')
]
DEVICE_MONITOR_CONTENT = [
_(u'更新数据:'),
_(u'查询数据:'),
_(u'删除数据:'),
_(u'获取设备状态'),
_(u'设置设备状态'),
_(u'获取设备参数:'),
_(u'设置设备参数:'),
_(u'连接设备'),
_(u'获取实时事件'),
_(u'获取新记录'),
_(u'连接断开'),
_(u'取消报警'),
_(u'命令队列检测'),
_(u'数据中心服务检测')
]
#执行失败:
DEVICE_COMMAND_RETURN = {
'0': _(u'正常'),
'-1': _(u'命令发送失败'),
'-2': _(u'命令超时'),
'-3': _(u'需要的缓存不足'),
'-4': _(u'解压失败'),
'-5': _(u'读取数据长度错误'),
'-6': _(u'通讯错误'), #解压的长度和期望的长度不一致
'-7': _(u'命令重复'),
'-8': _(u'连接尚未授权'),
'-9': _(u'数据错误,CRC校验失败'),
'-10': _(u'数据错误,SDK无法解析'),#数据错误,PullSDK无法解析
'-11': _(u'数据参数错误'),
'-12': _(u'命令执行错误'),
'-13': _(u'命令错误,没有此命令'),
'-14': _(u'通讯密码错误'),
'-15': _(u'写文件失败'),#固件将文件写到本地时失败
'-16': _(u'读文件失败'),
'-17': _(u'文件不存在'),#读取时找不到文件
'-18': _(u'存储空间已满'),
'-19': _(u'校验和出错'),
'-20': _(u'数据长度错误'),#接受到的数据长度与给出的数据长度不一致
'-21': _(u'没有设置平台参数'),
'-22': _(u'固件平台不一致'),#固件升级,传来的固件的平台与本地的平台不一致
'-23': _(u'升级的固件版本过旧'),#升级的固件版本比设备中的固件版本老
'-24': _(u'升级文件标识出错'),#升级的文件标识出错
'-25': _(u'文件名错误'),#固件升级,传来的文件名不对,即不是emfw.cfg
'-99': _(u'未知错误'),
'-100': _(u'表结构不存在'),
'-101': _(u'表结构中,条件字段不存在'),
'-102': _(u'字段总数不一致'),
'-103': _(u'字段排序不一致'),
'-104': _(u'实时事件数据错误'),
'-105': _(u'解析数据时,数据错误'),
'-106': _(u'数据溢出,下发数据超出4M'),
'-107': _(u'获取表结构失败'),
'-108': _(u'无效OPTIONS选项'),
'-201': _(u'库文件不存在'), #LoadLibrary失败
'-202': _(u'调用接口失败'),
'-203': _(u'通讯初始化失败'),
'-301': _(u'获取TCP/IP版本失败'),#??????????????
'-302': _(u'错误的TCP/IP版本号'),
'-303': _(u'获取协议类型失败'),
'-304': _(u'无效SOCKET'),
'-305': _(u'SOCKET错误'),
'-306': _(u'HOST错误'),
'-1001': _(u'连接断开'),
'-1002':_(u'禁用'),
'-1003':_(u'服务未启动'),#数据中心服务未启动
'-1100':_(u'队列异常! 请取消队列后重新同步数据'),#
'1000': _(u'获取新记录'),
}
#Video-linkage recording thread
class TThreadComm(object):
    """Callable wrapper that bundles a function with its positional
    arguments, suitable for handing to a thread/process as a target."""
    def __init__(self, func, args):
        self.func = func  # callable to invoke
        self.args = args  # tuple of positional arguments
    def __call__(self):
        # apply() has been deprecated since Python 2.3; the extended
        # call syntax is the direct equivalent.
        self.func(*self.args)
def video_record(linkageio, fstr, video_log):
    """Record a linkage-triggered video clip and attach its file name.

    Looks up the cached video server for the linkage's configured IP in
    the module-global g_video_server, starts a recording on the linked
    channel (only when none is already in progress for that channel) and
    stores the resulting ".mp4" name on *video_log*.

    NOTE(review): assumes g_video_server holds an entry for this IP —
    a missing key raises KeyError here; confirm callers populate it.
    """
    from django.conf import settings
#    from mysite.iaccess.video import TDevVideo
    #print "-----", linkageio.video_linkageio.ipaddress, linkageio.video_linkageio.ip_port, linkageio.video_linkageio.video_login, linkageio.video_linkageio.comm_pwd
    global g_video_server
    #print "g_video_server=", g_video_server
    video = g_video_server[linkageio.video_linkageio.ipaddress]
    if video is None:
        return
    # Target directory for OCX recordings under the application home.
    filepath = "%s\\tmp\\OCXRecordFiles\\"%settings.APP_HOME
    #print "filepath=", filepath, " fstr=", fstr, "video_delay_time=", linkageio.video_delay_time
    # Empty record_file entry means no recording in progress on the channel.
    if video.record_file[linkageio.lchannel_num] == "":
        video.record_v23(filepath, fstr, linkageio.lchannel_num, linkageio.video_delay_time)
    video_log.f_video=video.record_file[linkageio.lchannel_num]+".mp4"
    video_log.save(force_update=True)
    return
#-201 failed to load the library file
#-202 library interface call failed
#-203 communication initialization failed
#-204 connection failed, other error
#-301-304 low-level connection initialization failed
def strtodatetime(timestr): #1111-11-11 11:11:11
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a datetime.

    Returns None when *timestr* has no space-separated time part, and
    the sentinel datetime(1900, 1, 1) when the pieces fail to parse.
    """
    parts = timestr.split(' ')
    if len(parts) <= 1:
        return None
    pieces = parts[0].split('-') + parts[1].split(':')
    try:
        return datetime.datetime(*[int(p) for p in pieces[:6]])
    except:
        return datetime.datetime(1900, 1, 1, 0, 0, 0)
def FmtTTime(ttime):
    """Decode a panel-packed timestamp integer into a datetime.

    The device packs time as nested fixed-radix fields: seconds (60),
    minutes (60), hours (24), day-of-month (31, 1-based), month (12,
    0-based) and years since 2000.  Non-integer input decodes as 0.
    Returns None when the decoded fields do not form a valid date
    (e.g. February 30).
    """
    try:
        t = int(ttime)
    except:
        t = 0
    # Floor division keeps pure-integer semantics (byte-identical to the
    # old ``/=`` under Python 2 ints, and correct under Python 3).
    sec = t % 60
    t //= 60
    minute = t % 60
    t //= 60
    hour = t % 24
    t //= 24
    mday = t % 31 + 1
    t //= 31
    mon = t % 12
    t //= 12
    year = t + 2000
    try:
        return datetime.datetime(year, mon + 1, mday, hour, minute, sec)
    except:
        return None
def customSql(sql,action=True):
    """Execute raw SQL on the default Django connection.

    :param sql: SQL text to run.
    :param action: when True commit immediately (write statements);
        pass False for read-only queries.
    :return: the cursor so callers can fetch results.
    """
    cursor = connection.cursor()
    cursor.execute(sql)
    if action:
        # NOTE(review): connection._commit() is a private Django API;
        # the supported equivalent would be transaction.commit().
        connection._commit()
    return cursor
def strtoint(str):
    """Convert *str* to an int, falling back to 0 on any conversion error."""
    try:
        return int(str)
    except:
        return 0
def process_test(line):
    """Debug helper: replay the queued command at index *line* from the
    'dev2' redis list through process_comm_task()."""
    redis_Cach_table="dev2"
    redis_Cach = queqe_server()
    cmdline=redis_Cach.lrange(redis_Cach_table, line, line)
    # BUGFIX: lrange returns an empty list (never None) for an
    # out-of-range index, so test truthiness to avoid IndexError on
    # cmdline[0].
    if cmdline:
        process_comm_task(None, cmdline[0])
def get_cmd_table(cmd_str):
    """Map a device table-name prefix to its human-readable label from
    DEVICE_COMMAND_TABLE; unknown prefixes yield an empty string.

    BUGFIX: 'userauthorize' must be tested *before* 'user', because
    startswith('user') also matches 'userauthorize...' — the original
    order made the access-privilege branch unreachable.
    """
    prefix_to_index = (
        ('userauthorize', 1),  # access privilege info
        ('user', 0),           # user info
        ('holiday', 2),        # holiday settings
        ('timezone', 3),       # time-zone settings
        ('firstcard', 4),      # first-card normally-open
        ('multimcard', 5),     # multi-card opening
        ('transaction', 6),    # event records
    )
    for prefix, idx in prefix_to_index:
        if cmd_str.startswith(prefix):
            return unicode(DEVICE_COMMAND_TABLE[idx])
    return ""
def get_cmd_content(cmd_str):
    """Translate a raw queued command string into a human-readable
    operation description for the device-monitor UI.

    The verb at the start of *cmd_str* selects an entry from
    DEVICE_MONITOR_CONTENT; data commands also append the table label
    via get_cmd_table().  Unknown verbs yield an empty string.
    """
    comm_param=cmd_str.strip()
    retstr=""
    if (comm_param.startswith(G_QUEUE_ERROR)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[12])
    elif (comm_param.startswith(G_DEVICE_CONNECT)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[7])
    elif (comm_param.startswith(G_REAL_LOG)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[8])
    elif (comm_param.startswith(G_DOWN_NEWLOG)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[9])
    elif (comm_param.startswith(G_DEVICE_DISCONNECT)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[10])
    elif (comm_param.startswith(G_DEVICE_UPDATE_DATA)):
        strs = comm_param.split(" ", 3)
        table = strs[2]
        retstr=unicode(DEVICE_MONITOR_CONTENT[0])+get_cmd_table(table)
    elif (comm_param.startswith(G_DEVICE_QUERY_DATA)):
        strs = comm_param.split(" ", 4)
        table = strs[2]
        retstr=unicode(DEVICE_MONITOR_CONTENT[1])+get_cmd_table(table)
    elif(comm_param.startswith(G_DEVICE_DELETE_DATA)):
        strs = comm_param.split(" ", 3)
        table = strs[2]
        retstr=unicode(DEVICE_MONITOR_CONTENT[2])+get_cmd_table(table)
    elif(comm_param.startswith(G_DEVICE_GET_DATA)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[3])
    elif(comm_param.startswith(G_DEVICE_SET_DATA)):
        retstr=unicode(DEVICE_MONITOR_CONTENT[4])
    elif(comm_param.startswith(G_DEVICE_GET_OPTION)):
        strs = comm_param.split(" ", 2)
        opt=strs[2]
        retstr=unicode(DEVICE_MONITOR_CONTENT[5])+opt
    elif(comm_param.startswith(G_DEVICE_SET_OPTION)):
        strs = comm_param.split(" ", 3)
        opt=strs[2]
        retstr=unicode(DEVICE_MONITOR_CONTENT[6])+opt
    elif comm_param.startswith(G_DEVICE_CANCEL_ALARM):
        strs = comm_param.split(" ")
        opt = strs[2]
        # BUGFIX: index 7 is the "connect device" label; 11 is "cancel alarm".
        retstr = unicode(DEVICE_MONITOR_CONTENT[11]) + opt
    elif comm_param.startswith(G_CHECK_SERVICE):
        retstr = unicode(DEVICE_MONITOR_CONTENT[13])
    return retstr
@login_required
def downdata_progress(request): #progress-bar backend
    """Return JSON progress for the current session's device data sync.

    Reads "DEV_COMM_PROGRESS_<session>" ("<devname>,<percent>") and
    "DEV_COMM_SYNC_<session>" ("<total>,<done>") from redis and converts
    them into a per-device percent plus an overall percent.  When no
    sync is running for the session, zeros are reported with index 0.
    """
    cdatas = []
    skey=request.session.session_key
    #print "downdata_progress=", skey
    q_server=queqe_server()
    cur_gress=q_server.get("DEV_COMM_PROGRESS_%s"%skey)
    tol_gress=q_server.get("DEV_COMM_SYNC_%s"%skey)
    if cur_gress and tol_gress:
        cur_strs=cur_gress.split(",", 2)
        tol_gress=tol_gress.split(",", 2)
        try:
            icur=int(cur_strs[1])
        except:
            icur=0
        try:
            # Overall percent = devices-done * 100 / devices-total.
            itol=(int(tol_gress[1])*100)/int(tol_gress[0])
        except:
            itol=0
        cdata={
            'dev': cur_strs[0].decode("gb18030"),  # alias stored as GB18030 bytes
            'progress':icur,
            'tolprogress':itol,
            }
        cdatas.append(cdata)
        q_server.connection.disconnect()
        cc={
             'index': 1,
             'data': cdatas,
        }
    else:
        # No sync in progress for this session: report empty/zero state.
        cdata={
            'dev': "",
            'progress':0,
            'tolprogress':0,
            }
        cdatas.append(cdata)
        q_server.connection.disconnect()
        cc={
             'index': 0,
             'data': cdatas,
        }
    rtdata=simplejson.dumps(cc)
    return HttpResponse(smart_str(rtdata))
#Check whether the service is started: True = running, False = stopped
def check_service_commcenter():
    """Return True when the ZKECODataCommCenterService Windows service
    is running (i.e. sc.exe does not report it STOPPED)."""
    status = os.popen("sc.exe query ZKECODataCommCenterService").read()
    return ": 1 STOPPED" not in status
#Device monitoring -- iclock
@login_required
def get_device_monitor(request):
    """Build the device-monitor JSON: per-panel command status & progress.

    For every access-control panel in the user's authorized areas, read
    the last-command cache and pending-command list from redis and report
    the current operation label, its result string and a completion
    percentage.  When the communication service is stopped, every panel
    is reported with the "service not started" warning (-1003).
    """
    service_enable = check_service_commcenter()
    from mysite.iclock.models import Device, DevCmd
    from mysite.personnel.models import Area
    q_server=queqe_server()
    u = request.user
    aa = u.areaadmin_set.all()
    # Authorized area ids: the explicitly configured AreaAdmin rows, or
    # all areas when the user has none configured.
    a_limit = aa and [int(a.area_id) for a in aa] or [int(area.pk) for area in Area.objects.all()]
    # Access-control panels within the user's authorization scope.
    dev_list = Device.objects.filter(area__pk__in=a_limit).filter(device_type=DEVICE_ACCESS_CONTROL_PANEL).order_by('id')
    cdatas = []
    for dev in dev_list:
        ret = 0
        op_type = ""
        op_state = ""
        if not service_enable:
            # Data-communication service is down: warn for this device.
            op_type = get_cmd_content("CHECK_SERVICE")
            ret = '-1003' # warning codes are < -1000
            op_state = unicode(DEVICE_COMMAND_RETURN[ret])
        key=dev.command_temp_list_name()#ICLOCK_%s_TMP
        ucmd=q_server.get(key)
        cmdcount=q_server.llen(dev.new_command_list_name())#NEWCMDS_%s
        cntkey=dev.command_count_key()#ICLOCK_%s_CMD
        cnt=q_server.get(cntkey) # total number of queued commands
        if cmdcount is None:
            cmdcount="0"
        if cnt is None:
            cnt="0"
        if cnt.find('\x00'):
            # NOTE(review): str.find returns -1 (truthy) when absent and 0
            # (falsy) only at position 0, so this strips in most cases;
            # stripping NULs is harmless either way.
            cnt=cnt.strip('\x00')
        try:
            cnt=int(cnt)
        except:
            cnt=0
        try:
            cmdcount=int(cmdcount)
        except:
            cmdcount=0
        if int(cnt)>0:
            if int(cmdcount)==0:
                # Queue drained: reset the stored total count.
                q_server.set(cntkey, "0")
                cnt=q_server.get(cntkey)
        if int(cnt)>0:
            # Percent done = (total - remaining) * 100 / total.
            pp=(int(cnt)-int(cmdcount))*100/int(cnt)
            if pp < 0:
                pp=0
            percent="%d%%"%pp
        else:
            percent="100%"
        if ucmd is None:
            if service_enable: # default display right after service start
                op_type = get_cmd_content("REAL_LOG")#
                ret = '0'
                op_state = unicode(DEVICE_COMMAND_RETURN[ret])
            cdata={
                'id':dev.id,
                'devname':dev.alias,
                'sn':dev.sn,
                'op_type': op_type,
                'op_state':op_state,
                'retmemo': u"",
                'ret':ret,
                'percent':percent,
                'CmdCount':cmdcount,
            }
            cdatas.append(cdata)
            continue
        try:
            acmd=pickle.loads(ucmd)
        except:
            print_exc()
            acmd=None
        if acmd is None:
            # Cached command could not be unpickled: report empty status.
            cdata={
                'id':dev.id,
                'devname':dev.alias,
                'sn':dev.sn,
                'op_type': op_type,
                'op_state': op_state,
                'retmemo': "",
                'ret': ret,
                'percent':percent,
                'CmdCount':cmdcount,
            }
            cdatas.append(cdata)
            continue
        if service_enable:
            ret = acmd.CmdReturn
            op_type = get_cmd_content(acmd.CmdContent)
            if acmd.CmdReturn >= 0:
                op_state = unicode(DEVICE_COMMAND_RETURN["0"])
            else:
                try:
                    op_state = unicode(DEVICE_COMMAND_RETURN[str(acmd.CmdReturn)])
                except:
                    # Unknown error code: fall back to the generic label.
                    op_state = _(u"%(f)s:错误代码%(ff)d")%{"f":DEVICE_COMMAND_RETURN["-1001"], "ff":acmd.CmdReturn}
        reason=""
        cdata = {
            'id':dev.id,
            'devname':dev.alias,
            'sn':dev.sn,
            'op_type':op_type,   # operation label
            'op_state':op_state, # current result string
            'retmemo': reason,
            'ret': ret,
            'percent':percent,
            'CmdCount':cmdcount,
        }
        cdatas.append(cdata)
    cc = {
        'data':cdatas
    }
    q_server.connection.disconnect()
    rtdata=simplejson.dumps(cc)
    return HttpResponse(smart_str(rtdata))
@login_required
def ClearCmdCache(request):
    """Drop every queued/cached command for the device given by ?devid=...

    Returns {'ret': 1} (stringified) when a device id was supplied,
    {'ret': 0} otherwise.
    """
    from mysite.iclock.models import Device
    dev_id = request.GET.get("devid", 0)
    if not dev_id:
        return HttpResponse(smart_str({'ret': 0}))
    device = Device.objects.get(id=dev_id)
    queue = queqe_server()
    for cache_key in (device.new_command_list_name(),
                      device.command_temp_list_name(),
                      device.command_count_key()):
        queue.delete(cache_key)
    queue.connection.disconnect()
    return HttpResponse(smart_str({'ret': 1}))
@login_required
def comm_error_msg(request):
    """Report the access panels whose last cached command ended in error.

    Returns JSON with 'cnt' (number of failed panels) and 'data' (their
    names).  When no panel has a readable cached command the response is
    an empty object, matching the historical behaviour.
    """
    from mysite.iclock.models import Device, DevCmd
    from mysite.iclock.models.model_device import DEVICE_ACCESS_CONTROL_PANEL
    queue = queqe_server()
    failed_devs = []
    result = {}
    error_count = 0
    panels = filterdata_by_user(
        Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL),
        request.user)
    for panel in panels:
        raw_cmd = queue.get(panel.command_temp_list_name())
        if raw_cmd is None:
            continue
        try:
            last_cmd = pickle.loads(raw_cmd)
        except:
            last_cmd = None
        if last_cmd is None:
            continue
        if last_cmd.CmdReturn <= 0:
            error_count += 1
            failed_devs.append({'devname': last_cmd.SN.alias})
        # Rebuilt each iteration so panels without a cached command leave
        # the result untouched (historically an empty dict).
        result = {
            'cnt': error_count,
            'data': failed_devs,
        }
    queue.connection.disconnect()
    return HttpResponse(smart_str(simplejson.dumps(result)))
def get_door_state(val, doorno):
    """Extract the status byte for door *doorno* (1-4) from the packed
    32-bit state word *val*.  Door numbers outside 1-4 yield None."""
    if 1 <= doorno <= 4:
        return (val >> ((doorno - 1) * 8)) & 0xFF
#Write the door connection state into the cache
def set_door_connect(device, vcom):
    """Update the "connected" field of the cached door-state triple.

    The cache value is "doorword,alarmword,connected".  When vcom > 0
    the device is marked online (keeping the existing door/alarm words);
    otherwise the whole triple is reset to offline zeros.
    """
    q_server=queqe_server()
    doorstate=q_server.get(device.get_doorstate_cache_key())
#    print "doorstate=",doorstate
    if doorstate is None:
        doorstate="0,0,0"
    doorstr=doorstate.split(",", 3)
    if vcom > 0:#"DEVICE_DOOR_%s"%self.id
        q_server.set(device.get_doorstate_cache_key(), "%s,%s,%d"%(doorstr[0],doorstr[1], vcom))
    else:
        q_server.set(device.get_doorstate_cache_key(), "0,0,0")# not connected
    q_server.connection.disconnect()
#Door state monitoring
# state (0 no sensor, 1 closed, 2 open); alarm (1 alarm, 2 open timeout); connect (0 offline, 1 online)
def door_state_monitor(dev_list):#dev_list is a QuerySet of devices
    """Assemble per-door state dicts for the realtime monitor UI.

    For each device the cached triple "doorword,alarmword,connected" is
    unpacked and each AccDoor's byte is extracted via get_door_state().
    While the communication service is stopped, all doors are reported
    offline with zero state.  Returns {'data': [...]}.
    """
    from mysite.iclock.models import Device
    service_enable = check_service_commcenter()
    q_server = queqe_server()
#    if devids==0:#all
#        dev_list = Device.objects.all()
#    else:
#        dev_list = Device.objects.filter(id__in=devids)#type(devids)!=list and Device.objects.filter(id__in=[devids]) or Device.objects.filter(id__in=[devids])
    cdatas = []
#    print '--------dev_list=',dev_list
    for dev in dev_list:
        key = dev.get_doorstate_cache_key()
        doorstate = q_server.get(key)
        #print 'doorstate=',doorstate
        if doorstate and service_enable: # service down => show offline
            val=doorstate.split(",", 3)
            try:
                vdoor = int(val[0])  # packed open/close word, all doors
            except:
                print_exc()
                vdoor = 0
            try:
                valarm = int(val[1]) # packed alarm / open-timeout word
            except:
                print_exc()
                valarm = 0
            try:
                vcon = int(val[2])   # online flag
            except:
                print_exc()
                vcon = 0
        else:
            vdoor = 0
            valarm = 0
            vcon = 0
        door = dev.accdoor_set.all()
        for d in door:
            state = get_door_state(vdoor, d.door_no)
            alarm = get_door_state(valarm, d.door_no)
            cdata = {
                'id': int(d.id),
                'state': int(state),
                'alarm': int(alarm),
                'connect': int(vcon),
            }
            cdatas.append(cdata)
    cc={
        'data':cdatas,
    }
    q_server.connection.disconnect()
    #print cc
    #rtdata=simplejson.dumps(cc)
    return cc
    #return HttpResponse(smart_str(rtdata))
def checkdevice_and_savecache(q, devobj):
    """Refresh the device's last-activity timestamp, throttled via redis.

    The last write time is cached under the device's last-activity key;
    Device.last_activity is persisted to the DB only when more than 120
    seconds have passed.  A cached value in the future (clock change) or
    a missing value resets the marker to "1".
    """
    from mysite.iclock.models import Device
    last_activity_time=q.get(devobj.get_last_activity())
    # Update the last connection time.
    if last_activity_time:
        now_t=time.mktime(datetime.datetime.now().timetuple())
        if float(last_activity_time) > now_t:
            # Cached timestamp is in the future: reset the marker.
            q.set(devobj.get_last_activity(), "1")
        elif now_t - float(last_activity_time) > 120:
            try:
                dev=Device.objects.get(id = devobj.id)
                dev.last_activity = datetime.datetime.now()
                dev.save(force_update=True, log_msg=False)
                q.set(devobj.get_last_activity(), str(now_t))
            except:
                print_exc()
    else:
        q.set(devobj.get_last_activity(), "1")
def set_doorstr(istr, val, doorid):
    """Return the packed 4-byte state word *istr* with the byte for door
    *doorid* (1-4) replaced by *val*."""
    packed = [(istr >> shift) & 0xFF for shift in (0, 8, 16, 24)]
    packed[doorid - 1] = val
    return packed[0] | (packed[1] << 8) | (packed[2] << 16) | (packed[3] << 24)
#time, Pin, cardno, doorID, even_type, reserved, verified
def save_event_log(str, doorobj, devobj=None):
    """Persist one realtime event as an AccRTMonitor row.

    :param str: event fields split from the device line:
        [time, pin, cardno, doorID, event_type, in/out state, verified].
    :param doorobj: AccDoor queryset (first entry used), or None for
        device-level / auxiliary events.
    :param devobj: originating device wrapper carrying .id, or None.
    """
    from mysite.iaccess.models import AccRTMonitor, AccDoor
    from mysite.iclock.models import Device
    devid=0
    devname=""
    doorid=0
    doorname=""
    # Re-read device / door from the DB so the stored names are current.
    if devobj:
        try:
            dev=Device.objects.get(id=devobj.id)
            devid=dev.id
            devname=dev.alias
        except:
            print_exc()
    if doorobj:
        try:
            door=AccDoor.objects.get(id=doorobj[0].id)
            doorid=door.id
            doorname=door.door_name
        except:
            print_exc()
    if (strtoint(str[4]) == INOUT_SEVER) or (strtoint(str[4]) == INOUT_SHORT):
        # Auxiliary input event: no door attribution, str[3] is an address.
        try:
            rtlog=AccRTMonitor(device_id=devid, device_name=devname, time=strtodatetime(str[0]), pin = int(str[1]) and format_pin(str[1]) or "--",
                    event_type=(len(str[4])>0) and str[4] or 0,state=(len(str[5])>0) and str[5] or 0,
                    in_address=(len(str[3])>0) and str[3] or 0, card_no = int(str[2]) and str[2] or "--")
            rtlog.save(force_insert=True)
        except:
            print_exc()
    elif strtoint(str[4]) == EVENT_LINKCONTROL:
        # BUGFIX: the original wrote strtoint(str[6] == INOUT_SEVER),
        # converting a bool instead of comparing the converted value, so
        # this branch could never be taken.  Convert first, then compare
        # (same pattern as appendrtlog()).
        if (strtoint(str[6]) == INOUT_SEVER) or (strtoint(str[6]) == INOUT_SHORT):
            try:
                rtlog=AccRTMonitor(device_id=devid, device_name=devname, time=strtodatetime(str[0]), pin = int(str[1]) and format_pin(str[1]) or "--",
                        event_type=(len(str[4])>0) and str[4] or 0,state=(len(str[5])>0) and str[5] or 0,
                        trigger_opt=(len(str[6])>0) and str[6] or 0,in_address=(len(str[3])>0) and str[3] or 0, card_no = "--")
                rtlog.save(force_insert=True)
            except:
                print_exc()
        else:
            try:
                rtlog=AccRTMonitor(device_id=devid, device_name=devname, time=strtodatetime(str[0]), pin = int(str[1]) and format_pin(str[1]) or "--", door_id=doorid, door_name=doorname,
                        event_type=(len(str[4])>0) and str[4] or 0,state=(len(str[5])>0) and str[5] or 0,
                        trigger_opt=(len(str[6])>0) and str[6] or 0, card_no = "--")
                rtlog.save(force_insert=True)
            except:
                print_exc()
    else:
        # Ordinary door event: str[6] is the verification mode.
        try:
            rtlog=AccRTMonitor(device_id=devid, device_name=devname, time=strtodatetime(str[0]), pin = int(str[1]) and format_pin(str[1]) or "--", door_id=doorid, door_name=doorname,
                    event_type=(len(str[4])>0) and str[4] or 0,state=(len(str[5])>0) and str[5] or 0,
                    verified=(len(str[6])>0) and str[6] or 0, card_no = int(str[2]) and str[2] or "--")
            rtlog.save(force_insert=True)
        except:
            print_exc()
def appendrtlog(q_server, devobj, rtlog):
    """Process one realtime event line received from a panel.

    Responsibilities:
      * door-state heartbeat lines (event 255) are only logged;
      * door-sensor open/close and alarm events update the cached
        "doorword,alarmword,connected" triple;
      * every event is pushed onto the MONITOR_RT redis list (alarm
        events also onto ALARM_RT) and persisted via save_event_log();
      * events on attendance-enabled doors additionally create a
        Transaction row for the matched employee.

    Field layout of *rtlog*:
        time, pin, cardno, doorID, event_type, in/out state, verified
    """
    from mysite.iaccess.models import AccRTMonitor,AccDoor
    from mysite.iclock.models import Transaction
    from mysite.personnel.models import Employee
    try:
        rtlog=rtlog.split("\r\n", 1)[0]  # keep only the first event line
        #print '---rtlog=',rtlog
        str = rtlog.split(",",7)
        #print '---str=',str
        doorstr=""
        if len(str) < 7: # malformed event line
            return 0
        if strtoint(str[4]) == DOOR_STATE_ID:
            # Heartbeat line: str[1] = door open/close word, str[2] = alarm word,
            # str[4] == 255 marks it as a state report rather than an event.
            #q_server.set(devobj.get_doorstate_cache_key(), "%s,%s,1"%(str[1],str[2]))
            #dev_doorstatus[devobj.get_doorstate_cache_key()] = "%s,%s,1" % (str[1], str[2])
            printf("rtlog ---- %s %s"%(str[1],str[2]))
            return
        if strtoint(str[4])==EVENT_DOORSENSOROPEN:
            # Mark this door's byte as "open" (0x02) in the cached word.
            doorstate=q_server.get(devobj.get_doorstate_cache_key())
            if doorstate is None:
                doorstate="0,0,0"
            doorstr=doorstate.split(",", 3)
            try:
                val=set_doorstr(int(doorstr[0]), 0x02, int(str[3]))
            except:
                val=0
            q_server.set(devobj.get_doorstate_cache_key(), "%d,%s,1"%(val,doorstr[1]))
        if strtoint(str[4])== EVENT_DOORSENSORCLOSE:
            # Mark this door's byte as "closed" (0x01) in the cached word.
            doorstate=q_server.get(devobj.get_doorstate_cache_key())
            if doorstate is None:
                doorstate="0,0,0"
            doorstr=doorstate.split(",", 3)
            try:
                val=set_doorstr(int(doorstr[0]), 0x01, int(str[3]))
            except:
                val=0
            q_server.set(devobj.get_doorstate_cache_key(), "%d,%s,1"%(val,doorstr[1]))
        if (strtoint(str[4]) >= ALAEM_ID_START) and (strtoint(str[4]) < ALAEM_ID_END):
            # Alarm-band event: store the event code in the alarm word.
            doorstate=q_server.get(devobj.get_doorstate_cache_key())
            if doorstate is None:
                doorstate="0,0,0"
            doorstr=doorstate.split(",", 3)
            try:
                val=set_doorstr(int(doorstr[1]), int(str[4]), int(str[3]))
            except:
                val=0
            q_server.set(devobj.get_doorstate_cache_key(), "%s,%d,1"%(doorstr[0], val))
        try:
            # Resolve str[3] (door number) to the AccDoor primary key,
            # except for auxiliary and linkage events where it is an address.
            doorobj=None
            if (strtoint(str[4]) == INOUT_SEVER) or (strtoint(str[4]) == INOUT_SHORT):
                pass
            elif (strtoint(str[4]) == EVENT_LINKCONTROL) and (strtoint(str[6]) == INOUT_SEVER):
                pass
            elif (strtoint(str[4]) == EVENT_LINKCONTROL) and (strtoint(str[6]) == INOUT_SHORT):
                pass
            else:
                doorobj=AccDoor.objects.filter(device=devobj).filter(door_no=str[3])
                if doorobj is not None:
                    str[3]=doorobj and doorobj[0].id or 0
        except:
            print_exc()
        #if q_server.llen("MONITOR_RT")<MAX_RTLOG:
        try:
            # Push the normalized line onto the realtime-monitor list.
            log="%s,%s,%s,%s,%s,%s,%s,%d"%(str[0],str[1],str[3],str[4],str[5], str[6], str[2], devobj and devobj.id or 0)
            q_server.rpush("MONITOR_RT", log)
        except:
            print_exc()
        if (strtoint(str[4]) >= ALAEM_ID_START) and (strtoint(str[4]) < ALAEM_ID_END):
            q_server.rpush("ALARM_RT", log)
        # Field order for save_event_log:
        # time, Pin, cardno, doorID, event_type, reserved, verified
        save_event_log(str, doorobj, devobj)
        if doorobj:
            if doorobj[0].is_att:
                from models.accmonitorlog import EVENT_LOG_AS_ATT
                #if ((strtoint(str[4]) >= 0) and (strtoint(str[4]) <=2)) or ((strtoint(str[4]) >= 21) and (strtoint(str[4]) <=23)) or (strtoint(str[4]) == 5):
                if strtoint(str[4]) in EVENT_LOG_AS_ATT:
                    # Attendance door + attendance-worthy event: record a
                    # Transaction for the matching employee (if any).
                    try:
                        pin1=(len(str[1])>0) and str[1] or 0
                        user=Employee.objects.filter(PIN=format_pin(pin1))
                        if user:
                            trans=Transaction(UserID=user[0], SN=doorobj[0].device, TTime=strtodatetime(str[0]))
                            trans.save(force_insert=True)
                    except:
                        print_exc()
    except:
        print_exc()
def appendDevCmdOld(sn, cmdStr, cmdTime=None):
    """Queue a legacy device-command row and return its primary key.

    :param sn: Device instance the command targets.
    :param cmdStr: command text to store.
    :param cmdTime: commit time; defaults to now.
    """
    from mysite.iclock.models import DevCmd
    # BUGFIX: the original referenced an undefined name `dObj` (NameError
    # on every call); the target device is the `sn` parameter.
    cmd=DevCmd(SN=sn, CmdContent=cmdStr, CmdCommitTime=(cmdTime or datetime.datetime.now()))
    cmd.save(force_insert=True)
    return cmd.id
#Cardno,Pin,Verified,DoorID,EventType,InOutState,Time_second 记录
#time, Pin, cardno, doorID, even_type, reserved, verified 事件
def process_comm_task(devs, comm_param):#dev指DevComm的实例,而非Device的实例--comment by darcy
ret=0
dev=devs.comm
if (comm_param.startswith(G_DEVICE_CONNECT)):
qret = dev.connect()
return {"ret":qret["result"], "retdata":qret["data"]}
elif (comm_param.startswith(G_DEVICE_DISCONNECT)):
return dev.disconnect()
elif (comm_param.startswith(G_DEVICE_UPDATE_DATA)):
strs = comm_param.split(" ",3)
table = strs[2]
if(len(table)>0):
data=comm_param[comm_param.find(table)+len(table)+1:]
data=re.sub(ur'\\', '\r', data)
qret = dev.update_data(table.strip(),data.strip(),"")
else:
pass
#print "command error"
return {"ret":qret["result"], "retdata":qret["data"]}
elif (comm_param.startswith(G_DEVICE_QUERY_DATA)):
if (comm_param.find("transaction")>0):#下载全部刷卡事件
from mysite.iaccess.models import AccRTMonitor, AccDoor
qret=dev.get_transaction(False)
printf("24. user down all transaction rec=%d"%qret['result'])
if qret['result']>0:
for i in range(1, qret['result'], 1):
log = qret['data'][i]
str = log.split(",",7)
doorobj=None
try:
if (strtoint(str[4]) == INOUT_SEVER) or (strtoint(str[4]) == INOUT_SHORT): #辅助事件
pass
#print str[3]
elif (strtoint(str[4]) == EVENT_LINKCONTROL) and (strtoint(str[6]) == INOUT_SEVER): #连动事件
pass
#print str[3]
elif (strtoint(str[4]) == EVENT_LINKCONTROL) and (strtoint(str[6]) == INOUT_SHORT): #连动事件
pass
#print str[3]
else:
doorobj=AccDoor.objects.filter(device=devs.devobj).filter(door_no=str[3])
if doorobj is not None:
str[3]=doorobj and doorobj[0].id or 0
except:
print_exc()
try:
restr="%s,%s,%s,%s,%s,%s,%s"%(FmtTTime(str[6]),str[1],str[0],str[3],str[4],str[5],str[2])
save_event_log(restr.split(",",7), doorobj, devs.devobj)
except:
print_exc()
continue
return {"ret":qret['result'], "retdata":""}
else:
return {"ret":-1, "retdata":""}
else:
str = ""
strs = comm_param.split(" ",4)
table = strs[2]
field_names = strs[3]
if(len(table)>0):
filter=comm_param[comm_param.find(field_names)+len(field_names)+1:]
qret = dev.query_data(table.strip(),fields.strip(),filter.strip(), "")
else:
pass
#printf "command error"
return {"ret":qret["result"],"retdata":qret["data"]}
elif(comm_param.startswith(G_DEVICE_DELETE_DATA)):
strs = comm_param.split(" ",3)
table = strs[2]
if(len(table)>0):
qret = dev.delete_data(table,comm_param[comm_param.find(table)+len(table)+1:],)
else:
pass
#print "command error"
return {"ret":qret["result"],"retdata":qret["data"]}
elif(comm_param.startswith(G_DEVICE_GET_DATA)):
return
elif(comm_param.startswith(G_DEVICE_SET_DATA)):
try:
comm_param=comm_param.strip()
strs = comm_param.split(" ", 5)
door=int(strs[2])
index=int(strs[3])
state=int(strs[4])
qret=dev.controldevice(door, index, state)
return {"ret":qret["result"],"retdata":qret["data"]}
except:
print_exc()
return
elif(comm_param.startswith(G_DEVICE_CONTROL_NO)):#
try:
comm_param = comm_param.strip()
strs = comm_param.split(" ", 5)
door = int(strs[2])#门编号
state = int(strs[3])#启用(1)或禁用(0)
qret = dev.control_normal_open(door, state)#控制常开
#print '---qret=',qret,'-----',door,'---',state
return {"ret": qret["result"], "retdata": qret["data"]}
except:
print_exc()
return
elif(comm_param.startswith(G_DEVICE_CANCEL_ALARM)):
try:
qret = dev.cancel_alarm()#取消报警
return {"ret": qret["result"], "retdata": qret["data"]}
except:
print_exc()
return
# elif(comm_param.startswith(G_DEVICE_UPGRADE_FIRMWARE)):
# try:
# comm_param = comm_param.strip()
# strs = comm_param.split(" ", 5)
# file_name = strs[2]
# import struct
# (buffer,) = struct.unpack("s", strs[3])
# buff_len = strs[4]
# qret = dev.update_firmware(file_name, buffer, buff_len)#升级固件
# except:
# print_exc()
# return
elif(comm_param.startswith(G_DEVICE_GET_OPTION)):
strs = comm_param.split(" ",2)
opt=strs[2]
if len(opt)>0:
optitem=re.sub(ur'\t', ',', opt)
qret=dev.get_options(optitem.strip())
return {"ret":qret["result"],"retdata":qret["data"]}
elif(comm_param.startswith(G_DEVICE_SET_OPTION)):
strs = comm_param.split(" ",3)
opt=strs[2]
if len(opt)>0:
optitem=re.sub(ur'\t', ',', opt)
qret=dev.set_options(optitem.strip())
return {"ret":qret["result"],"retdata":qret["data"]}
else:
return {"ret":0, "retdata":"unknown command"}
class DeviceMonitor(object):
    """Per-device communication state tracked by a worker process."""
    def __init__(self):
        self.id = 0                  # Device primary key
        self.comm_tmp = ""           # redis key holding the last command
        self.cln = ""                # redis key of the pending-command list
        self.devobj = None           # Device model instance
        self.comm = None             # TDevComm connection wrapper
        self.try_failed_time = 0     # consecutive failed-communication count
        self.try_connect_count = 0   # reconnect attempts so far
        self.try_connect_delay = 0   # timestamp of last successful connect
#命令处理
def process_general_cmd(dev,q_server):
cmd_ret=False
try:
acmd=q_server.getrpop(dev.cln) #防意外掉电,命令丢失, 先取出执行,成功再删除
if (acmd!=None) and (acmd.startswith(G_QUEUE_ERROR)):
cmd_ret=True
acmd=None
try:
from mysite.iclock.models.model_devcmd import DevCmd
acmd=DevCmd(SN=dev.devobj, CmdContent=G_QUEUE_ERROR, CmdReturn=-1100)
q_server.set(dev.comm_tmp, pickle.dumps(acmd))
print "add queue error"
except:
print_exc()
acmd=None
except:
printf("error 2 zzz")
print_exc()
try:
if(acmd != None):
acmd=pickle.loads(acmd)
except:
acmd=None
if acmd is not None:
try:
from mysite.iclock.models.model_device import MAX_COMMAND_TIMEOUT_SECOND
cmdline=str(acmd.CmdContent)
if acmd.CmdImmediately:
now_t=datetime.datetime.now()
if (now_t - acmd.CmdCommitTime).seconds > MAX_COMMAND_TIMEOUT_SECOND:
q_server.rpop(dev.cln)
return False
#print "general====", cmdline,"==="
except:
printf("check cmd error")
print_exc()
if cmdline != None:
try:
cmd_ret = True
try:
ret=process_comm_task(dev, cmdline)
except:
printf("%s *********process_comm_task error"%dev.devobj.alias.encode("gb18030"), True)
printf("8.%s -- process_general_cmd cmd=%s, ret=%d"%(dev.devobj.alias.encode("gb18030"), cmdline, ret["ret"]))
if (ret["ret"] >= 0): #执行成功, 写入数据库
q_server.rpop(dev.cln)
acmd.CmdReturn=ret["ret"]
acmd.CmdReturnContent=ret["retdata"]
acmd.CmdTransTime=datetime.datetime.now()
acmd.save()
checkdevice_and_savecache(q_server, dev.devobj)
else:
if acmd.CmdImmediately == 1: #立即执行的命令,只执行一次,包括失败
q_server.rpop(dev.cln)
if ret["ret"] == -18:
q_server.deletemore(dev.cln) #存贮空间不足,清空命令缓存
acmd.CmdReturn=ret["ret"]
cmd_ret=False
q_server.set(dev.comm_tmp, pickle.dumps(acmd))
except:
print_exc()
printf("process_comm_task defail....")
cmd_ret=False
return cmd_ret
def add_dev_dict(devdic,devobj):
    """Create a DeviceMonitor entry for *devobj* in *devdic* and connect.

    On a successful connect (comm.hcommpro > 0) the door cache is marked
    online, the reconnect-delay timestamp is recorded, and the panel
    clock is synced when the device requests it; otherwise the doors are
    marked offline.  Errors are swallowed with a log line.
    """
    try:
        devdic[devobj.id] = DeviceMonitor()
        devdic[devobj.id].id = devobj.id
        devdic[devobj.id].cln =devobj.new_command_list_name()
        devdic[devobj.id].comm_tmp= devobj.command_temp_list_name()
        devdic[devobj.id].devobj = devobj
        devdic[devobj.id].comm= TDevComm(devobj.getcomminfo())
        devdic[devobj.id].comm.connect()
        devdic[devobj.id].try_connect_count=0
        if devdic[devobj.id].comm.hcommpro>0:
            devdic[devobj.id].try_connect_delay=time.mktime(datetime.datetime.now().timetuple())
            set_door_connect(devdic[devobj.id].devobj, 1)# online
            if devdic[devobj.id].devobj.sync_time:
                devdic[devobj.id].devobj.set_time(False)
        else:
            set_door_connect(devdic[devobj.id].devobj, 0)# offline
            devdic[devobj.id].try_connect_delay=0
    except:
        printf("15. add_dev_dict id=%d error"%devobj.id)
def is_comm_io_error(errorid):
    """True when *errorid* falls in the I/O-error band strictly between
    ERROR_COMM_PARAM and ERROR_COMM_OK."""
    return ERROR_COMM_PARAM < errorid < ERROR_COMM_OK
def check_and_down_log(dev): # download new records once per day
    """Pull new transaction logs once per day at the configured hour.

    The hour comes from appconfig.ini [iaccess] down_newlog (default 0,
    i.e. midnight).  A per-device redis key remembers the day-of-month
    of the last pull, so the download happens at most once per day.
    Each downloaded record is reordered and stored via save_event_log().
    Returns the record count, or None when the pull was skipped.

    NOTE(review): if get_transaction() raises, `ret` stays unbound and
    the printf below raises NameError — confirm intended behaviour.
    """
    from mysite.iaccess.models import AccRTMonitor, AccDoor
    from mysite.iclock.models.model_devcmd import DevCmd
    try:
        cfg=dict4ini.DictIni(os.getcwd()+"/appconfig.ini",values={"iaccess":{"down_newlog":0}}) # default: pull at hour 0
        now_hour=datetime.datetime.now().hour
        if now_hour != cfg.iaccess.down_newlog:
            return
        q_server = queqe_server()
        trans_key=dev.devobj.get_transaction_cache()
        mday=q_server.get(trans_key)
        if mday is None:
            # First run: initialize the marker and wait for the next cycle.
            q_server.set(dev.devobj.get_transaction_cache(), "0")
            return
        now_day=datetime.datetime.now().day
        if int(mday) == now_day:
            # Already pulled today.
            return
        else:
            q_server.set(trans_key, "%d"%now_day)
        # Show "downloading new records" (code 1000) in the monitor UI.
        acmd=DevCmd(SN=dev.devobj, CmdContent="DOWN_NEWLOG", CmdReturn=1000)
        q_server.set(dev.comm_tmp, pickle.dumps(acmd))
        q_server.connection.disconnect()
    except:
        print_exc()
        printf("22. %s check_and_down_log "%dev.devobj.alias.encode("gb18030"), True)
    try:
        ret = dev.comm.get_transaction(True)
    except:
        printf("%s *********get_transaction error"%dev.devobj.alias.encode("gb18030"), True)
    printf("23. %s ---check_and_down_log rec=%d"%(dev.devobj.alias.encode("gb18030"), ret['result']), True)
    if ret['result']>0:
        for i in range(1, ret['result']+1, 1):
            try:
                doorobj=None
                log = ret['data'][i]
                str = log.split(",",7)
                # Auxiliary / linkage events keep str[3] as an address;
                # door events resolve it to the AccDoor primary key.
                if (strtoint(str[4]) == INOUT_SEVER) or (strtoint(str[4]) == INOUT_SHORT):
                    pass
                elif (strtoint(str[4]) == EVENT_LINKCONTROL) and (strtoint(str[6]) == INOUT_SEVER):
                    pass
                elif (strtoint(str[4]) == EVENT_LINKCONTROL) and (strtoint(str[6]) == INOUT_SHORT):
                    pass
                else:
                    doorobj=AccDoor.objects.filter(device=dev.devobj).filter(door_no=str[3])
                    if doorobj is not None:
                        str[3]=doorobj and doorobj[0].id or 0
                # Reorder raw record fields into the canonical event layout.
                restr="%s,%s,%s,%s,%s,%s,%s"%(FmtTTime(str[6]),str[1],str[0],str[3],str[4],str[5],str[2])
                save_event_log(restr.split(",",7), doorobj, dev.devobj)
            except:
                print_exc()
                continue
    return ret['result']
def check_server_stop(procename, pid, devs):
    """Check whether this worker process was asked to stop.

    When the redis key "<procename>_SERVER" equals "STOP", every managed
    device is marked disconnected in the monitor cache and True is
    returned so the caller can exit; otherwise returns False.
    """
    # BUGFIX: DevCmd was referenced below without being imported in this
    # scope, raising a NameError that the broad except silently swallowed
    # (sibling functions import it locally the same way).
    from mysite.iclock.models.model_devcmd import DevCmd
    try:
        ret=False
        q_server=queqe_server()
        proce_server_key="%s_SERVER"%procename
        proce_stop=q_server.get(proce_server_key)
        q_server.connection.disconnect()
        if proce_stop == "STOP":
            q_server=queqe_server()
            q_server.delete(proce_server_key)
            #q_server.deletemore("CENTER_HEART_*")
            printf("%s servers return "%procename, True)
            for devsn in devs:
                dev = devs[devsn]
                try:
                    # Surface the stop as "disconnected" in the device monitor.
                    acmd=DevCmd(SN=dev.devobj, CmdContent="DISCONNECT", CmdReturn=-1001)
                    q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                except:
                    print_exc()
            ret=True
            q_server.connection.disconnect()
    except:
        print_exc()
        printf("stop server error")
    return ret
def wait_com_pause(com, timeout): #COM_1_CHANNELS PROCESS_WAIT_PAUSE
    """Ask the RS485 worker owning COM port *com* to pause, and wait.

    Sets COM_<n>_CHANNELS to PROCESS_WAIT_PAUSE, then polls once per
    second (up to *timeout* iterations) until the worker acknowledges by
    raising the value past PROCESS_WAIT_PAUSE.  Returns True when the
    port has no owner or the pause was acknowledged, False on timeout.
    """
    q_server=queqe_server()
    channel_key="COM_%d_CHANNELS"%com #COM_1_CHANNELS
    com_key="COM_%d_PID"%com
    # BUGFIX: the redis connection was leaked on the early-return and
    # acknowledged-pause paths; it is now released on every exit.
    try:
        com_pid=q_server.get(com_key)
        if com_pid is None:
            # No worker owns this COM port; nothing to pause.
            return True
        q_server.set(channel_key, "%d"%PROCESS_WAIT_PAUSE)
        for i in range(0, timeout, 1):
            mchan=q_server.get(channel_key)
            if mchan is None:
                time.sleep(1)
                continue
            try:
                mchan=int(mchan)
            except:
                mchan=0
            if int(mchan) > PROCESS_WAIT_PAUSE:
                # Worker acknowledged the pause request.
                return True
            time.sleep(1)
        return False
    finally:
        q_server.connection.disconnect()
def set_comm_run(com): # remove the pause markers for a serial channel
    """Resume a paused RS485 channel by deleting its pause and timeout keys."""
    queue = queqe_server()
    for cache_key in ("COM_%d_CHANNELS" % com,
                      "COM_%d_CHANNELS_TIMEOUT" % com):
        queue.delete(cache_key)
    queue.connection.disconnect()
#实时任务处理函数,用于进程调用
def net_task_process(devobjs, devcount, procename="", process_heart={}, children_pid={}):
    """Worker-process main loop: poll one group of pull-mode access panels.

    Connects every device in *devobjs*, then loops forever: honours
    stop/pause requests from the queue server, hot-syncs its device list
    with the front-end cache, pushes pending commands, pulls real-time
    logs and scheduled log downloads, and stamps a heartbeat into
    *process_heart*.

    devobjs       -- Device model instances owned by this worker
    devcount      -- number of devices (not used in the body)
    procename     -- worker name, e.g. "Net0" or "COM_1"
    process_heart -- shared dict: worker name -> last heartbeat timestamp
    children_pid  -- shared dict: worker name -> pid
    Returns 0 on orderly shutdown.

    NOTE(review): process_heart/children_pid are mutable default
    arguments; the visible callers always pass Manager dicts, so the
    shared defaults are never used in practice -- confirm before relying
    on calling this without them.
    """
    from mysite.iclock.models.model_device import Device, COMMU_MODE_PULL_RS485
    from mysite.iaccess.view import check_acpanel_args
    from mysite.iclock.models.model_devcmd import DevCmd
    q_server=queqe_server()
    tt = q_server.get("CENTER_RUNING")  # main-service run id, set at service startup
    pid = os.getpid()  # this worker's pid
    children_pid[procename] = pid
#    if procename.find("COM") >= 0:
#        com_pid=q_server.get("%s_PID"%procename)
#        if com_pid:
#            pid=int(com_pid)
#        else:
#            pid=0
#        print "com pid=", pid
#    else:
#        pid=os.getpid()
    # register this worker in the central process list
    q_server.rpush(CENTER_PROCE_LIST, "%s"%procename)
    devs = {}
    # initial connect: wrap every device and announce a CONNECT command
    for devobj in devobjs:
        try:
            add_dev_dict(devs, devobj)
            q_server.delete(devs[devobj.id].comm_tmp)  # clear any stale command
            acmd=DevCmd(SN=devobj, CmdContent="CONNECT", CmdReturn=devs[devobj.id].comm.hcommpro)
            q_server.set(devs[devobj.id].comm_tmp, pickle.dumps(acmd))
        except:
            printf("add_dev_dict %d error"%devobj.id, True)
    if check_server_stop(procename, pid, devs):  # stop requested during startup
        return 0
    printf("%s :parent process: %d"%(procename, os.getpid()))
    while(1):
        # --- stop requested: announce disconnects and exit the worker ---
        proce_server_key="%s_SERVER"%procename
        proce_stop=q_server.get(proce_server_key)
        if proce_stop == "STOP":
            try:
                q_server.delete(proce_server_key)
                #q_server.deletemore("CENTER_HEART_*")
                printf("%s servers return "%procename, True)
                for devsn in devs:
                    dev = devs[devsn]
                    try:
                        # device monitor shows the device as disconnected
                        acmd=DevCmd(SN=dev.devobj, CmdContent="DISCONNECT", CmdReturn=-1001)
                        q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                    except:
                        print_exc()
                q_server.connection.disconnect()
            except:
                print_exc()
                printf("stop server error ", True)
            return 0
        # heartbeat
        pid_t=time.mktime(datetime.datetime.now().timetuple())
        process_heart[procename] = pid_t
        # --- main-service run id changed (another master started): exit ---
        if tt != q_server.get("CENTER_RUNING"):
            try:
                printf("%s servers id error return "%procename, True)
                for devsn in devs:
                    dev = devs[devsn]
                    try:
                        # device monitor shows the device as disconnected
                        acmd=DevCmd(SN=dev.devobj, CmdContent="DISCONNECT", CmdReturn=-1001)
                        q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                    except:
                        print_exc()
                q_server.connection.disconnect()
            except:
                print_exc()
                printf("stop server error ", True)
            return 0
        # --- hot-sync this worker's device dict with the front-end cache ---
        proce_cache_devset=[]  # devices listed in the front-end cache
        proce_thread_devset=[]  # devices currently held by this worker
        for i in range(0, q_server.llen(procename)):
            try:
                proc_cache=q_server.lindex(procename, i)
                proce_dev=pickle.loads(proc_cache)
            except:
                proce_dev=None
            try:
                if proce_dev:
                    proce_cache_devset.append(proce_dev)
            except:
                printf("proce_cache_devset append device error")
        del_dev={}
        for devsn in devs:
            try:
                thread_dev = devs[devsn].devobj.getdevinfo()
                proce_thread_devset.append(thread_dev)
            except:
                printf("proce_thread_devset append device error")
            # remove devices no longer present in the cache
            # NOTE(review): thread_dev may be stale here if getdevinfo()
            # raised above -- confirm whether that case can occur
            if thread_dev not in proce_cache_devset:
                try:
                    devs[devsn].comm.disconnect()
                    # defer the dict delete until after iteration
                    del_dev[devsn] = devs[devsn]
                    printf("1. %s delete device %d"%(procename, thread_dev["id"]), True)
                except:
                    print_exc()
                    printf("16. %s delete device error id=%d"%(procename, thread_dev["id"]), True)
        try:
            # delete after the loop to avoid mutating devs while iterating
            for del_d in del_dev:
                del devs[del_d]
        except:
            printf("procecache delete device error", True)
        # add devices present in the cache but not yet in this worker
        for proce_cache in proce_cache_devset:
            if proce_cache not in proce_thread_devset:
                try:
                    cdev=Device.objects.filter(id=proce_cache["id"])
                    if cdev:
                        add_dev_dict(devs, cdev[0])
                        q_server.delete(devs[cdev[0].id].comm_tmp)
                        acmd=DevCmd(SN=cdev[0], CmdContent="CONNECT", CmdReturn=devs[cdev[0].id].comm.hcommpro)
                        q_server.set(devs[cdev[0].id].comm_tmp, pickle.dumps(acmd))
                        printf("22. %s add device %s"%(procename, proce_cache["id"]), True)
                except:
                    print_exc()
                    printf("add device error", True)
                    continue
        printf("3. %s proce_thread_devset=%s, proce_cache_devset=%s"%(procename, proce_thread_devset, proce_cache_devset))
        # --- serial-port worker with no devices: honour the pause protocol ---
        if procename.find("COM") >= 0:
            try:
                if devs.__len__()==0:
                    # pause this worker so new RS485 devices can be added
                    channel_key="%s_CHANNELS"%procename #COM_1_CHANNELS
                    channel_timeout_key="%s_CHANNELS_TIMEOUT"%procename
                    channel_t=time.mktime(datetime.datetime.now().timetuple())
                    mchan_t=q_server.get(channel_timeout_key)
                    if mchan_t:
                        try:
                            mchan_t=int(mchan_t)
                        except:
                            mchan_t=0
                        if channel_t - int(mchan_t) > PAUSE_TIMEOUT:  # pause timed out: cancel it
                            q_server.delete(channel_key)
                            q_server.delete(channel_timeout_key)
                    mchan=q_server.get(channel_key)
                    if mchan is None:
                        mchan = 0
                    try:
                        mchan=int(mchan)
                    except:
                        mchan=0
                    if int(mchan) == PROCESS_WAIT_PAUSE:
                        q_server.set(channel_key, "%d"%PROCESS_PAUSE)
                        q_server.set(channel_timeout_key, "%d"%(int(channel_t)))
                    if int(mchan) > PROCESS_NORMAL:
                        continue
            except:
                printf("proccache device empty return error", True)
        # --- per-device communication ---
        for devsn in devs:
            # stop request is re-checked inside the device loop so a stop
            # does not wait for a full pass over all devices
            proce_server_key="%s_SERVER"%procename
            proce_stop=q_server.get(proce_server_key)
            if proce_stop == "STOP":
                try:
                    q_server.delete(proce_server_key)
                    #q_server.deletemore("CENTER_HEART_*")
                    printf("%s servers return "%procename, True)
                    # NOTE(review): this inner loop shadows the outer devsn
                    for devsn in devs:
                        dev = devs[devsn]
                        try:
                            # device monitor shows the device as disconnected
                            acmd=DevCmd(SN=dev.devobj, CmdContent="DISCONNECT", CmdReturn=-1001)
                            q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                        except:
                            print_exc()
                    q_server.connection.disconnect()
                except:
                    print_exc()
                    printf("stop server error ", True)
                return 0
            # exit if the main-service run id changed
            if tt != q_server.get("CENTER_RUNING"):
                try:
                    printf("%s servers id error return "%procename, True)
                    for devsn in devs:
                        dev = devs[devsn]
                        try:
                            # device monitor shows the device as disconnected
                            acmd=DevCmd(SN=dev.devobj, CmdContent="DISCONNECT", CmdReturn=-1001)
                            q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                        except:
                            print_exc()
                    q_server.connection.disconnect()
                except:
                    print_exc()
                    printf("stop server error ", True)
                return 0
            if procename.find("COM") >= 0:
                try:
                    # pause protocol for RS485 workers (same as above, also
                    # honoured while devices are present)
                    channel_key="%s_CHANNELS"%procename #COM_1_CHANNELS
                    channel_timeout_key="%s_CHANNELS_TIMEOUT"%procename
                    channel_t=time.mktime(datetime.datetime.now().timetuple())
                    mchan_t=q_server.get(channel_timeout_key)
                    if mchan_t:
                        try:
                            mchan_t=int(mchan_t)
                        except:
                            mchan_t=0
                        if channel_t - int(mchan_t) > PAUSE_TIMEOUT:  # pause timed out: cancel it
                            q_server.delete(channel_key)
                            q_server.delete(channel_timeout_key)
                    mchan=q_server.get(channel_key)
                    if mchan is None:
                        mchan = 0
                    try:
                        mchan=int(mchan)
                    except:
                        mchan=0
                    if int(mchan) == PROCESS_WAIT_PAUSE:
                        q_server.set(channel_key, "%d"%PROCESS_PAUSE)
                        q_server.set(channel_timeout_key, "%d"%(int(channel_t)))
                    if int(mchan) > PROCESS_NORMAL:
                        continue
                except:
                    printf("485 pause error", True)
            dev = devs[devsn]
            # skip disabled or deleted devices
            try:
                cdev=Device.objects.filter(id=dev.devobj.id)
                if cdev:
                    if not cdev[0].check_dev_enabled():
                        acmd=DevCmd(SN=dev.devobj, CmdContent="DISABLED", CmdReturn=-1002)
                        q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                        if dev.comm.hcommpro > 0:
                            dev.comm.disconnect()
                        now_t=time.mktime(datetime.datetime.now().timetuple())
                        if now_t - dev.try_connect_delay < MAX_INTERVAL_CONNTECT_TIME:
                            # reconnect immediately once the device is re-enabled
                            dev.try_connect_delay = now_t - MAX_INTERVAL_CONNTECT_TIME
                        continue
                else:
                    printf("14. check_dev_enabled not find device", True)
                    continue  # device no longer exists (deleted)
            except:
                printf("4. check_dev_enabled error", True)
            printf("5. %s -- dev.comm.hcommpro=%d"%(dev.devobj.alias.encode("gb18030"), dev.comm.hcommpro))
            # not connected: retry at most once per MAX_INTERVAL_CONNTECT_TIME
            if dev.comm.hcommpro <= 0:
                now_t=time.mktime(datetime.datetime.now().timetuple())
                if now_t - dev.try_connect_delay > MAX_INTERVAL_CONNTECT_TIME:
                    try:
                        dev.try_connect_count += 1
                        dev.try_connect_delay=time.mktime(datetime.datetime.now().timetuple())
                        printf("5. %s -- try connect device"%dev.devobj.alias.encode("gb18030"))
                        dev.comm.disconnect()
                        dev.comm.connect()
                        acmd=DevCmd(SN=dev.devobj, CmdContent="CONNECT", CmdReturn=dev.comm.hcommpro)
                        q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                    except:
                        print_exc()
                    if dev.comm.hcommpro>0:  # reconnect succeeded
                        try:
                            dev.try_connect_count = 0
                            set_door_connect(dev.devobj, 1)
                            if dev.devobj.sync_time:
                                dev.devobj.set_time(False)
                            # callback for access-panel device parameters
                            check_acpanel_args(dev, dev.comm)
                        except:
                            print_exc()
                    else:
                        try:
                            set_door_connect(dev.devobj, 0)
                            if dev.try_connect_count > MAX_CONNECT_COUNT:  # too many failures: disable the device
                                printf("6. %s -- set dev disabled"%(dev.devobj.alias.encode("gb18030")))
                                dev.try_connect_count = 0
                                dev.devobj.set_dev_disabled()
                        except:
                            print_exc()
                continue
            # push pending commands; when commands were sent, skip log polling
            try:
                if process_general_cmd(dev,q_server):
                    continue
            except:
                printf("process_general_cmd error", True)
            # pull the real-time event log
            try:
                rtlog = dev.comm.get_rtlog()
                # raw data straight from the firmware
                printf("7.%s -- rtlog result:%s"%(dev.devobj.alias.encode("gb18030"), rtlog["data"]))
                if(is_comm_io_error(rtlog["result"])):
                    printf("7. %s -- get rtlog return failed result=%d"%(dev.devobj.alias.encode("gb18030"), rtlog["result"]), True)
                    acmd=DevCmd(SN=dev.devobj, CmdContent="REAL_LOG", CmdReturn=rtlog["result"])
                    q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                    dev.try_failed_time +=1
                    if(dev.try_failed_time>MAX_TRY_COMM_TIME):  # repeated I/O errors: drop the connection
                        try:
                            dev.comm.disconnect()
                            dev.try_connect_delay=time.mktime(datetime.datetime.now().timetuple())
                            set_door_connect(dev.devobj, 0)
                            dev.try_failed_time = 0
                            acmd=DevCmd(SN=dev.devobj, CmdContent="DISCONNECT", CmdReturn=-1001)
                            q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                        except:
                            print_exc()
                    continue
                else:
                    try:
                        acmd=DevCmd(SN=dev.devobj, CmdContent="REAL_LOG", CmdReturn=1)
                        q_server.set(dev.comm_tmp, pickle.dumps(acmd))
                        checkdevice_and_savecache(q_server, dev.devobj)
                    except:
                        print_exc()
                    if (rtlog["result"] >0):
                        appendrtlog(q_server, dev.devobj, rtlog["data"])
            except:
                print_exc()
                printf("get rtlog error", True)
            # scheduled log download + heartbeat refresh
            try:
                ret_log=check_and_down_log(dev)
                if ret_log>0:
                    printf("check_and_down_log end .... ret_log=%d"%ret_log, True)
                pid_t=time.mktime(datetime.datetime.now().timetuple())
                process_heart[procename] = pid_t
            except:
                print_exc()
                printf("check_and_down_log error", True)
        # throttle: fewer devices -> longer sleep, with a 0.5s floor
        delaytime=(1200-150*devs.__len__())/1000.0
        if delaytime > 0.50:
            time.sleep(delaytime)
        else:
            time.sleep(0.50)
    # unreachable: the while(1) loop exits only via return
    q_server.connection.disconnect()
    return 0
class TThreadMonitor(object):
    """Callable wrapper binding a function to a fixed argument tuple.

    Used as a thread/process target: calling the instance invokes
    ``func(*args)``.
    """
    def __init__(self,func,args):
        self.func = func   # callable to invoke
        self.args = args   # positional-argument tuple
    def __call__(self):
        # FIX: apply() is Python-2-only (removed in Python 3); argument
        # unpacking is equivalent and works on both.  Also propagate the
        # return value instead of discarding it.
        return self.func(*self.args)
class TDevDataCommCenter(object):
    """Device-communication dispatch center.

    Distributes pull-mode access panels across worker processes: TCP/IP
    panels are balanced over ``max_thread`` pool workers ("Net0".."NetN"),
    while each RS485 serial port gets its own dedicated process
    ("COM_<port>").  Every worker's device list is mirrored into the queue
    server so the workers can hot-sync additions, edits and removals.
    """
    def __init__(self, process_heart, children_pid):
        #from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL, COMMU_MODE_PULL_RS485, COMMU_MODE_PULL_TCPIP
        cfg=dict4ini.DictIni(os.getcwd()+"/appconfig.ini",values={"iaccess":{"max_thread":5}})
        self.max_thread = cfg.iaccess.max_thread
        self.pool = Pool(processes = self.max_thread)  # worker-process pool for TCP/IP devices
        self.comport_set={}
        self.NetDev=Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL).filter(comm_type=COMMU_MODE_PULL_TCPIP)
        self.ComDev=Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL).filter(comm_type=COMMU_MODE_PULL_RS485)
        printf("self.NetDev=%s"%self.NetDev)
        # balance the TCP/IP devices evenly over the workers
        self.net_dev_set=self.set_thread_dev(self.NetDev)
        printf("self.net_dev_set=%s"%self.net_dev_set)
        self.killRsagent()
        self.pid=os.getpid()  # main-process pid
        q_server=queqe_server()
        q_server.set(CENTER_MAIN_PID, "%d"%(self.pid))  # record the main process pid
        for i in range(0, self.max_thread):
            devs = self.net_dev_set[i]
            tName = "Net%d" % i
            # seed the worker's heartbeat before it starts
            process_heart[tName] = time.mktime(datetime.datetime.now().timetuple())
            q_server.delete(tName)
            for dev in devs:
                q_server.rpush(tName, pickle.dumps(dev.getdevinfo()))
            # TCP/IP worker runs inside the pool
            self.pool.apply_async(net_task_process, [devs, len(devs), tName, process_heart, children_pid])
        self.comports =Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485).values('com_port').distinct()
        for comport in self.comports:
            comdevs = Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485,com_port=comport['com_port'])
            tName="COM_%d"%comport["com_port"]
            devs=[]
            q_server.delete(tName)
            for comdev in comdevs:
                devs.append(comdev)
                q_server.rpush(tName, pickle.dumps(comdev.getdevinfo()))
            # each serial port gets a dedicated process
            p = Process(target=net_task_process, args=(devs, len(devs), tName, process_heart, children_pid))
            q_server.set("%s_PID"%tName, "%d"%(p._parent_pid))
            p.start()
        q_server.save()
        q_server.connection.disconnect()
    def killRsagent(self):
        """Kill any running RS485 agent executable (Windows-only taskkill)."""
        return os.system("taskkill /im plrscagent.* /f")
    def set_thread_dev(self, devset):
        """Round-robin *devset* into ``max_thread`` buckets; returns the list of buckets."""
        devs=[]
        for i in range(0, self.max_thread):
            devs.append([])
        for i in range(0, len(devset)):
            devs[i%self.max_thread].append(devset[i])
        return devs
    def refushcomport(self):
        """Reload the serial-port and TCP/IP device querysets from the database."""
        from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL, COMMU_MODE_PULL_RS485, COMMU_MODE_PULL_TCPIP
        self.comports =Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485).values('com_port').distinct()
        self.NetDev=Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL).filter(comm_type=COMMU_MODE_PULL_TCPIP)
    # sync front-end and back-end device lists
    def delete_device(self, devinfo):
        """Remove the device described by *devinfo* (a getdevinfo() dict)
        from its worker's bucket and rewrite the cached device list."""
        from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL, COMMU_MODE_PULL_RS485, COMMU_MODE_PULL_TCPIP
        q_server=queqe_server()
        if devinfo["comm_type"]==COMMU_MODE_PULL_TCPIP:
            for i in range(0, len(self.net_dev_set)):
                # FIX: iterate over a copy -- the original removed from the
                # list while iterating it, which skips elements.
                for net_dev in list(self.net_dev_set[i]):
                    if net_dev.id== devinfo["id"]:
                        self.net_dev_set[i].remove(net_dev)
                        tName="Net%d"%i
                        q_server.delete(tName)
                        for dev in self.net_dev_set[i]:
                            q_server.rpush(tName, pickle.dumps(dev.getdevinfo()))
        elif devinfo["comm_type"]==COMMU_MODE_PULL_RS485:
            # serial devices: rebuild the whole COM list from the database
            comdevs = Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485,com_port=devinfo["com_port"])
            tName="COM_%d"%devinfo["com_port"]
            q_server.delete(tName)
            for dev in comdevs:
                q_server.rpush(tName, pickle.dumps(dev.getdevinfo()))
        q_server.save()
        q_server.connection.disconnect()
    def edit_device(self, dev):
        """Replace the cached copy of Device *dev* in its worker's bucket
        and rewrite the worker's cached device list."""
        from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL, COMMU_MODE_PULL_RS485, COMMU_MODE_PULL_TCPIP
        q_server=queqe_server()
        if dev.comm_type==COMMU_MODE_PULL_TCPIP:
            for i in range(0, len(self.net_dev_set)):
                # FIX: the original referenced the bare name net_dev_set
                # (three times), raising NameError at runtime.
                for net_dev in self.net_dev_set[i]:
                    if net_dev.id == dev.id:
                        ii=self.net_dev_set[i].index(net_dev)
                        self.net_dev_set[i][ii]=dev
                        # refresh the cached device info for this worker
                        tName="Net%d"%i
                        q_server.delete(tName)
                        dev=[]
                        for dev0 in self.net_dev_set[i]:
                            try:
                                # FIX: typo "Devivce" raised NameError that the
                                # bare except silently swallowed.
                                dev=Device.objects.filter(id=dev0.id)
                                q_server.rpush(tName, pickle.dumps(dev[0].getdevinfo()))
                            except:
                                printf("edit_device error")
        elif dev.comm_type==COMMU_MODE_PULL_RS485:
            comdevs = Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485,com_port=dev.com_port)
            tName="COM_%d"%dev.com_port
            devs=[]
            q_server.delete(tName)
            for comdev in comdevs:
                devs.append(comdev)
                q_server.rpush(tName, pickle.dumps(comdev.getdevinfo()))
        q_server.save()
        q_server.connection.disconnect()
    def adddevice(self,dev):
        """Assign a new Device *dev* to a worker and update the cached lists;
        spawns a new COM_<port> process when a new serial port appears."""
        from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL, COMMU_MODE_PULL_RS485, COMMU_MODE_PULL_TCPIP
        self.NetDev=Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL).filter(comm_type=COMMU_MODE_PULL_TCPIP)
        q_server=queqe_server()
        if dev.comm_type==COMMU_MODE_PULL_TCPIP:
            new_dev=True
            for dev_set in self.net_dev_set:
                if dev in dev_set:
                    new_dev=False
            if new_dev: # device is not handled by any worker yet
                for i in range(0, self.max_thread):
                    # assign to the first under-loaded worker
                    if len(self.net_dev_set[i]) <= len(self.NetDev)/self.max_thread:
                        self.net_dev_set[i].append(dev)
                        tName="Net%d"%i
                        q_server.rpush(tName, pickle.dumps(dev.getdevinfo()))
                        break
        elif dev.comm_type==COMMU_MODE_PULL_RS485:
            comdevs = Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485,com_port=dev.com_port)
            tName="COM_%d"%dev.com_port
            devs=[]
            q_server.delete(tName)
            for comdev in comdevs:
                devs.append(comdev)
                q_server.rpush(tName, pickle.dumps(comdev.getdevinfo()))
            com_list=[]
            for v in self.comports:
                com_list.append(v.values()[0])
            if dev.com_port not in com_list:
                # first device on this serial port: start a dedicated worker
                self.comports =Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL,comm_type=COMMU_MODE_PULL_RS485).values('com_port').distinct()
                p = Process(target=net_task_process, args=(devs, len(devs), tName))
                q_server.set("%s_PID"%tName, "%d"%(p._parent_pid))
                p.start()
        q_server.save()
        q_server.connection.disconnect()
def check_sync_db_cachel():
    """Re-sync pending device commands from the database into the cache.

    For every access-control panel: drop its cached command list, then
    re-trigger every unanswered, non-immediate command in id order.
    """
    from mysite.iclock.models.model_device import Device, DEVICE_ACCESS_CONTROL_PANEL
    from mysite.iclock.models.model_devcmd import DevCmd, trigger_cmd_device
    panels = Device.objects.filter(device_type=DEVICE_ACCESS_CONTROL_PANEL)
    for panel in panels:
        # drop this device's cached command list
        cache = queqe_server()
        cache.delete(panel.new_command_list_name())
        cache.connection.disconnect()
        # re-queue commands that never got an answer, oldest first
        pending = (DevCmd.objects.filter(CmdReturn=None)
                   .filter(CmdImmediately=0)
                   .filter(SN=panel)
                   .order_by('id'))
        for pending_cmd in pending:
            trigger_cmd_device(pending_cmd)
# A worker process misbehaved: kill the child processes, then restart all processes
def killall_pid(children_pid=None):
    """Kill service processes (Windows taskkill), best-effort.

    With *children_pid* (a name->pid mapping), kills only the listed child
    processes -- this avoids the caller killing itself.  Without it, kills
    the main process whose pid is recorded under CENTER_MAIN_PID.
    All errors are deliberately swallowed: this runs during shutdown or
    recovery where nothing useful can be done on failure.
    """
    try:
        q_server = queqe_server()
        if children_pid: # kill only the children, avoid "suicide"
            # FIX: the original iterated children_pid.values (the bound
            # method, missing "()"), which raised and was silently
            # swallowed -- no child was ever killed.
            for pid in children_pid.values():
                os.system("taskkill /PID %s /F /T" % pid)
        else:
            main_pid = q_server.get(CENTER_MAIN_PID)
            os.system("taskkill /PID %s /F /T" % main_pid)
        q_server.connection.disconnect()
    except:
        pass
def clear_file_cache(q_server):
    """Purge all per-run cache keys and stamp a fresh run id.

    Called once at service startup so workers never act on state left
    over from a previous run.
    """
    #q_server.deletemore("CENTER_HEART_*")  # delete all process ids
    wildcard_patterns = (
        "ICLOCK_*_LAST_ACTIVEITY",  # last connection time
        "DEV_COMM_PROGRESS_*",      # progress-bar cache
        "DEV_COMM_SYNC_*",          # progress-bar cache
        "*_CHANNELS",               # RS485 pause channels
        "*_CHANNELS_TIMEOUT",
        "*_PID",                    # RS485 worker pids
        "ICLOCK_*_TMP",             # current-command cache
        "DEVICE_DOOR_*",            # door-state cache
        "MONITOR_RT",               # real-time monitor data
    )
    for pattern in wildcard_patterns:
        q_server.deletemore(pattern)
    q_server.delete(CENTER_PROCE_LIST)
    q_server.delete(DEVOPT)
    q_server.deletemore("*_SERVER")
    # new run id: workers exit when this no longer matches theirs
    run_id = "{0:%Y-%m-%d %X}".format(datetime.datetime.now())
    q_server.set("CENTER_RUNING", run_id)
def rundatacommcenter():
    """Main service entry point: start the dispatch center and supervise it.

    Clears per-run cache keys, kills any previous service instance, spawns
    TDevDataCommCenter, then loops forever applying device add/edit/delete
    requests from the DEVOPT queue and killing everything when a worker's
    heartbeat goes stale (the service is expected to be restarted
    externally after the kill).  Never returns normally.
    """
    from mysite.iclock.models.model_device import Device, COMMU_MODE_PULL_RS485
    global g_devcenter
    manager = Manager()
    printf("1.--rundatacenter--")
    process_heart = manager.dict()  # worker heartbeat timestamps
    children_pid = manager.dict()  # worker pids (used to kill zombie processes)
    # check_sync_db_cachel()  # sync database and cache data
    try:
        deletelog()
    except:
        pass
    # the cache still holds the previous main-process pid: kill it
    killall_pid()
    try:
        q_server=queqe_server()
        clear_file_cache(q_server)
        g_devcenter = TDevDataCommCenter(process_heart, children_pid)
    except:
        print_exc();
    while True:
        try:
            # NOTE(review): 'len' shadows the builtin for the rest of the loop body
            len=q_server.llen(DEVOPT)
            if len > 0:
                # pop one device add/edit/delete request
                acmd=q_server.lpop(DEVOPT)
                if acmd is None:
                    continue
                try:
                    devinfo=pickle.loads(acmd)
                except:
                    devinfo=None
                if devinfo is not None:
                    try:
                        printf("2. add com device %s operate=%s"%(devinfo["id"], devinfo["operatstate"]), True)
                        if (devinfo["operatstate"]==OPERAT_ADD): # new device
                            dev=Device.objects.filter(id = devinfo["id"])
                            if dev:
                                g_devcenter.adddevice(dev[0])
                            else:
                                # device not saved to the DB yet: requeue and wait
                                q_server.lpush(DEVOPT, pickle.dumps(devinfo))
                                time.sleep(10)
                        elif (devinfo["operatstate"]==OPERAT_EDIT): # edit = delete then re-add
                            g_devcenter.delete_device(devinfo)
                            dev=Device.objects.filter(id = devinfo["id"])
                            if dev:
                                g_devcenter.adddevice(dev[0])
                            else:
                                # device not saved to the DB yet: requeue and wait
                                q_server.lpush(DEVOPT, pickle.dumps(devinfo))
                                time.sleep(10)
                        elif (devinfo["operatstate"]==OPERAT_DEL):
                            g_devcenter.delete_device(devinfo)
                    except:
                        printf("device opreater error", True)
                        continue
            else:
                time.sleep(5)
            # cap the real-time monitor queues
            if (q_server.llen("MONITOR_RT")>MAX_RTLOG):
                q_server.lock_delete("MONITOR_RT")
                q_server.lock_delete("ALARM_RT")
            # zombie-process detection: a stale heartbeat kills everything
            pid_set=q_server.lrange(CENTER_PROCE_LIST, 0, -1)
            for p in pid_set:
                pid_time = process_heart[p]
                if pid_time:
                    now_t = time.mktime(datetime.datetime.now().timetuple())
                    # no heartbeat for 1 hour: kill all processes and restart
                    if now_t - pid_time > 60*60*1:
                        printf("PID die**********", True)
                        try:
                            # the service restarts automatically after the kill
                            killall_pid()
                            print '****kill pid finished'
                        except:
                            print '-----killall pid error'
                        break;
        except:
            continue
        time.sleep(1)
    # unreachable: the while True loop never exits
    q_server.connection.disconnect()
if __name__ == '__main__':
print 'start at:', ctime()
rundatacenter()
print 'finish'
| [
"xiongjianhong@a5b1b082-b159-ab12-9a9e-d79e7dfc8adf"
] | xiongjianhong@a5b1b082-b159-ab12-9a9e-d79e7dfc8adf |
270085783dfae395dfed329ee1efc4d371fe5491 | bccd16717d20d673cb514d6ac68e624c2c4dae88 | /sdk/python/pulumi_gcp/endpoints/outputs.py | bc1adc66e71939c4a0506decc6499354e78b3c93 | [
"MPL-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | dimpu47/pulumi-gcp | e78d228f7c2c929ad3e191331b75c6e4c4cc4fa9 | 38355de300a5768e11c49d344a8165ba0735deed | refs/heads/master | 2023-07-07T13:00:15.682157 | 2020-09-23T18:43:11 | 2020-09-23T18:43:11 | 173,437,663 | 0 | 0 | Apache-2.0 | 2023-07-07T01:05:58 | 2019-03-02T11:06:19 | Go | UTF-8 | Python | false | false | 5,387 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'ServiceApi',
'ServiceApiMethod',
'ServiceEndpoint',
'ServiceIamBindingCondition',
'ServiceIamMemberCondition',
]
@pulumi.output_type
class ServiceApi(dict):
    """Describes one API exposed by an Endpoints service (generated output type).

    Only fields that were set are stored on the underlying dict; absent
    fields read back as None through the generated getters.
    """
    def __init__(__self__, *,
                 methods: Optional[List['outputs.ServiceApiMethod']] = None,
                 name: Optional[str] = None,
                 syntax: Optional[str] = None,
                 version: Optional[str] = None):
        # Record only the fields the provider actually reported.
        if methods is not None:
            pulumi.set(__self__, "methods", methods)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if syntax is not None:
            pulumi.set(__self__, "syntax", syntax)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter
    def methods(self) -> Optional[List['outputs.ServiceApiMethod']]:
        """Methods exposed by this API, when reported."""
        return pulumi.get(self, "methods")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """API name, when reported."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def syntax(self) -> Optional[str]:
        """Protobuf syntax of the API, when reported."""
        return pulumi.get(self, "syntax")

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        """API version, when reported."""
        return pulumi.get(self, "version")

    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceApiMethod(dict):
    """One method of an Endpoints service API (generated output type).

    Only fields that were set are stored; absent fields read back as None.
    """
    def __init__(__self__, *,
                 name: Optional[str] = None,
                 request_type: Optional[str] = None,
                 response_type: Optional[str] = None,
                 syntax: Optional[str] = None):
        # Record only the fields the provider actually reported.
        if name is not None:
            pulumi.set(__self__, "name", name)
        if request_type is not None:
            pulumi.set(__self__, "request_type", request_type)
        if response_type is not None:
            pulumi.set(__self__, "response_type", response_type)
        if syntax is not None:
            pulumi.set(__self__, "syntax", syntax)

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """Method name, when reported."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="requestType")
    def request_type(self) -> Optional[str]:
        """Request message type (wire name "requestType")."""
        return pulumi.get(self, "request_type")

    @property
    @pulumi.getter(name="responseType")
    def response_type(self) -> Optional[str]:
        """Response message type (wire name "responseType")."""
        return pulumi.get(self, "response_type")

    @property
    @pulumi.getter
    def syntax(self) -> Optional[str]:
        """Protobuf syntax of the method, when reported."""
        return pulumi.get(self, "syntax")

    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceEndpoint(dict):
    """Network endpoint of an Endpoints service (generated output type).

    Only fields that were set are stored; absent fields read back as None.
    """
    def __init__(__self__, *,
                 address: Optional[str] = None,
                 name: Optional[str] = None):
        # Record only the fields the provider actually reported.
        if address is not None:
            pulumi.set(__self__, "address", address)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def address(self) -> Optional[str]:
        """Endpoint address, when reported."""
        return pulumi.get(self, "address")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """Endpoint name, when reported."""
        return pulumi.get(self, "name")

    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceIamBindingCondition(dict):
    """IAM condition attached to a service IAM binding (generated output type).

    ``expression`` and ``title`` are required; ``description`` is optional.
    """
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> str:
        """CEL condition expression."""
        return pulumi.get(self, "expression")

    @property
    @pulumi.getter
    def title(self) -> str:
        """Short title for the condition."""
        return pulumi.get(self, "title")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Optional human-readable description."""
        return pulumi.get(self, "description")

    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceIamMemberCondition(dict):
    """IAM condition attached to a service IAM member (generated output type).

    ``expression`` and ``title`` are required; ``description`` is optional.
    """
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> str:
        """CEL condition expression."""
        return pulumi.get(self, "expression")

    @property
    @pulumi.getter
    def title(self) -> str:
        """Short title for the condition."""
        return pulumi.get(self, "title")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Optional human-readable description."""
        return pulumi.get(self, "description")

    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| [
"public@paulstack.co.uk"
] | public@paulstack.co.uk |
d8327f24f64ffc940857d0a3ef0f3bfcf4d1ceb2 | 8ca19f1a31070738b376c0370c4bebf6b7efcb43 | /office365/communications/operations/update_recording_status.py | 562e461d09952648e16c8272843d786b126ccdf4 | [
"MIT"
] | permissive | vgrem/Office365-REST-Python-Client | 2ef153d737c6ed5445ba1e446aeaec39c4ef4ed3 | cbd245d1af8d69e013c469cfc2a9851f51c91417 | refs/heads/master | 2023-09-02T14:20:40.109462 | 2023-08-31T19:14:05 | 2023-08-31T19:14:05 | 51,305,798 | 1,006 | 326 | MIT | 2023-08-28T05:38:02 | 2016-02-08T15:24:51 | Python | UTF-8 | Python | false | false | 203 | py | from office365.communications.operations.comms import CommsOperation
class UpdateRecordingStatusOperation(CommsOperation):
    """Describes the response format of an update recording status action.

    Marker subclass: inherits all behaviour from CommsOperation and adds
    no members of its own.
    """
| [
"vvgrem@gmail.com"
] | vvgrem@gmail.com |
9383aacff23eec6d52cef01ce4e047d49be74540 | 2ea5efd9ccc926e368e7132d7709f48265dae5f3 | /Codes/02_Input.py | 6290817714e68706d9ccddf2c6420490316a7f6a | [] | no_license | ravi4all/PythonWE_Morning_2020 | a843f3fde886b0be42d5f935daecada17cf4ff1c | 87d4f84ef8142ac2c7e08d70a9fab359da08287b | refs/heads/master | 2020-12-14T19:26:37.538793 | 2020-03-21T08:00:51 | 2020-03-21T08:00:51 | 234,846,451 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | # by default python takes input in strung format
name = input("Enter your name : ")
print("Hello "+name)
# we need to type cast the input()
num_1 = int(input("Enter first number : "))
num_2 = int(input("Enter second number : "))
result = num_1 + num_2
print("Sum is",result)
# Multiline Print
print("""
1. Add
2. Sub
3. Mul
4. Div
""")
| [
"noreply@github.com"
] | ravi4all.noreply@github.com |
b6ce66b2673cdd275ddff44c827d484ceec0c153 | 4fe0ed5e592641b272aa2167ae591155a9cad416 | /pyqtgraph_ex/data_tree.py | 929a705c3d51eebc6dc80cf3b7130a49086f8e52 | [] | no_license | AlexandreMarcotte/test_code | cf715caee730cfdafa7cf97bd011ac15443872f3 | 07e115055befd55d4598dd8a4b33bbdd00ba6f5a | refs/heads/master | 2021-06-07T05:06:12.085390 | 2019-05-06T23:45:38 | 2019-05-06T23:45:38 | 137,810,297 | 0 | 0 | null | 2021-06-01T23:44:40 | 2018-06-18T21:50:39 | Python | UTF-8 | Python | false | false | 722 | py | # -*- coding: utf-8 -*-
"""
Simple use of DataTreeWidget to display a structure of nested dicts, lists, and arrays
"""
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import numpy as np
# A QApplication must exist before any widgets are created.
app = QtGui.QApplication([])

# Nested sample data -- lists, dicts and a numpy array -- showing how
# DataTreeWidget renders each type.
d = {
    'list1': [1,2,3,4,5,6, {'nested1': 'aaaaa', 'nested2': 'bbbbb'}, "seven"],
    'dict1': {
        'x': 1,
        'y': 2,
        'z': 'three'
    },
    # NOTE(review): the key label says 20x20 but the array is 10x10
    'array1 (20x20)': np.ones((10,10))
}

tree = pg.DataTreeWidget(data=d)
tree.show()
tree.setWindowTitle('pyqtgraph example: DataTreeWidget')
tree.resize(600,600)
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_() | [
"alexandre.marcotte.1094@gmail.com"
] | alexandre.marcotte.1094@gmail.com |
9979efac15eedc30be1b3f32a462d0c9599d248b | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/eventgrid/get_domain_topic.py | cbdaf3633482a6ab8964a94e6381f42ff672dead | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 4,056 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetDomainTopicResult',
'AwaitableGetDomainTopicResult',
'get_domain_topic',
]
@pulumi.output_type
class GetDomainTopicResult:
    """
    Domain Topic.
    """
    def __init__(__self__, id=None, name=None, provisioning_state=None, system_data=None, type=None):
        # Defensive type checks: these results are built from raw RPC output.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified identifier of the resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        Provisioning state of the domain topic.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        The system metadata relating to Domain Topic resource.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Type of the resource.
        """
        return pulumi.get(self, "type")
class AwaitableGetDomainTopicResult(GetDomainTopicResult):
    """Awaitable variant of GetDomainTopicResult, for use with ``await``."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # Generator-protocol shim: never actually suspends; immediately
        # returns a plain GetDomainTopicResult carrying the same fields.
        if False:
            yield self
        return GetDomainTopicResult(
            id=self.id,
            name=self.name,
            provisioning_state=self.provisioning_state,
            system_data=self.system_data,
            type=self.type)
def get_domain_topic(domain_name: Optional[str] = None,
                     domain_topic_name: Optional[str] = None,
                     resource_group_name: Optional[str] = None,
                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDomainTopicResult:
    """
    Domain Topic.
    API Version: 2020-06-01.

    Invokes the ``azure-native:eventgrid:getDomainTopic`` provider function
    and wraps the result in an awaitable.

    :param str domain_name: Name of the domain.
    :param str domain_topic_name: Name of the topic.
    :param str resource_group_name: The name of the resource group within the user's subscription.
    """
    # Provider functions take camelCase argument names on the wire.
    __args__ = dict()
    __args__['domainName'] = domain_name
    __args__['domainTopicName'] = domain_topic_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to the SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:eventgrid:getDomainTopic', __args__, opts=opts, typ=GetDomainTopicResult).value
    return AwaitableGetDomainTopicResult(
        id=__ret__.id,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        system_data=__ret__.system_data,
        type=__ret__.type)
| [
"noreply@github.com"
] | morrell.noreply@github.com |
2933cb872811c4d984c9ae6d1e0c0d305ec26d27 | fc0eda8560a26c88b790d236070ed0559d0dc4a4 | /leetcode/basicDS06_tree/b04_lc105_build_tree.py | 72aa63933b0ddc9b7bc22fb7b3cf578bb94f79e5 | [] | no_license | pankypan/DataStructureAndAlgo | b4bd417a16cdb594bbed2ca0220dbd63eb60f3c1 | 6c5d40d57d378994236549f8dea906c75121eadf | refs/heads/master | 2021-08-03T01:22:08.442709 | 2021-07-19T14:56:44 | 2021-07-19T14:56:44 | 279,599,190 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | # https://leetcode-cn.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/
from typing import List
class TreeNode:
    """A single node of a binary tree."""

    def __init__(self, x):
        """Store the payload and start with no children."""
        self.val = x
        self.left = self.right = None
class Solution:
    """LeetCode 105: rebuild a binary tree from preorder + inorder traversals.

    Relies on the problem guarantee that values are unique, since
    ``list.index`` is used to locate the root inside ``inorder``.
    """

    def buildTree(self, preorder: List[int], inorder: List[int]) -> TreeNode:
        # Base case: empty slice -> no subtree; single value -> leaf.
        if len(preorder) <= 1:
            return TreeNode(preorder[0]) if len(preorder) == 1 else None
        root_val = preorder[0]
        root = TreeNode(root_val)
        # Locate the root once instead of three times (each lookup is O(n)).
        idx = inorder.index(root_val)
        # Left subtree: the idx values preceding the root in inorder.
        root.left = self.buildTree(preorder[1:idx + 1], inorder[:idx])
        # Right subtree: everything after the root.
        root.right = self.buildTree(preorder[idx + 1:], inorder[idx + 1:])
        return root
if __name__ == '__main__':
    pass  # no local driver; the module is exercised via LeetCode's harness
| [
"1356523334@qq.com"
] | 1356523334@qq.com |
84049e1555a83e8bb2650ff6e600098232396841 | 271812d493b7e23550cbc17490d48a882c3179b1 | /brubeck/timekeeping.py | 8ba9e2e3df86fff907271bcd9a853c743de493b3 | [] | no_license | droot/brubeck | 69a5d86fc996ba367bda8d2bc9eb1075a36701fd | 35a44d62d590a5108eb4d5ba2f9f15f780cb99ce | refs/heads/master | 2021-01-16T22:49:46.263829 | 2011-09-06T14:18:49 | 2011-09-06T14:18:49 | 2,334,831 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,181 | py | import time
from datetime import datetime
from dateutil.parser import parse
from dictshield.fields import LongField
###
### Main Time Function
###
def curtime():
    """Return the current time in milliseconds since the epoch (UTC).

    This function is the central method for getting the current time;
    everything else in the module should derive "now" from here.
    """
    # int() replaces the Python-2-only long(): for values this large py2
    # auto-promotes int to long, so the result is arithmetically identical,
    # and the code also runs on Python 3.
    return int(time.time() * 1000)
###
### Converstion Helpers
###
def datestring_to_millis(ds):
    """Takes a string representing the date and converts it to milliseconds
    since epoch.
    """
    # dateutil's parser accepts most human-readable date formats.
    dt = parse(ds)
    return datetime_to_millis(dt)
def datetime_to_millis(dt):
    """Convert a datetime instance to milliseconds since the epoch.

    Note: time.mktime interprets the timetuple in the local timezone and
    works in whole seconds, so sub-second precision is discarded.
    """
    epoch_seconds = time.mktime(dt.timetuple())
    return epoch_seconds * 1000
def millis_to_datetime(ms):
    """Return the datetime equivalent of *ms* milliseconds since the epoch."""
    return datetime.fromtimestamp(ms / 1000.0)
###
### Neckbeard date parsing (fuzzy!)
###
def prettydate(d):
    """Render datetime *d* as a fuzzy, human-readable age string.

    *d* is compared against ``datetime.utcnow()``, so it is expected to be a
    naive UTC datetime. Anything more than a week old, or in the future,
    falls back to an absolute '%d %b %y' rendering.

    Adapted from:
    http://stackoverflow.com/questions/410221/natural-relative-days-in-python
    """
    diff = datetime.utcnow() - d
    # timedelta.seconds is only the within-day remainder; that is safe here
    # because the day-scale branches are decided on diff.days first.
    s = diff.seconds
    if diff.days > 7 or diff.days < 0:
        return d.strftime('%d %b %y')
    elif diff.days == 1:
        return '1 day ago'
    elif diff.days > 1:
        return '{} days ago'.format(diff.days)
    elif s <= 1:
        return 'just now'
    elif s < 60:
        return '{} seconds ago'.format(s)
    elif s < 120:
        return '1 minute ago'
    elif s < 3600:
        # Floor division keeps the Python 2 behavior and avoids
        # '2.5 minutes ago' under Python 3's true division.
        return '{} minutes ago'.format(s // 60)
    elif s < 7200:
        return '1 hour ago'
    else:
        return '{} hours ago'.format(s // 3600)
###
### Custom DictShield Field
###
class MillisecondField(LongField):
    """High precision time field.

    Stores a timestamp as milliseconds since the epoch while also accepting
    human-readable date strings on assignment.
    """
    def __set__(self, instance, value):
        """__set__ is overriden to allow accepting date strings as input.
        dateutil is used to parse strings into milliseconds.
        """
        # str/unicode covers both Python 2 string types; any other value is
        # assumed to already be a millisecond count.
        if isinstance(value, (str, unicode)):
            value = datestring_to_millis(value)
        instance._data[self.field_name] = value
| [
"jd@j2labs.net"
] | jd@j2labs.net |
4e3e1bd04cab8cfc321de73eaf5575d9c21fcb92 | 6b4f38370ce1126a7f74e13c2012ab238a01df93 | /azure-mgmt-sql/azure/mgmt/sql/operations/recoverable_databases_operations.py | d6bf1308fcf3724fd6009628fc5a06a4b35b5127 | [
"MIT"
] | permissive | action/azure-sdk-for-python | 52d8a278bfb2fbc9c7e11297e3bd21c604f906b1 | f06553e45451f065c87ee9ed503ac4be81e64a71 | refs/heads/master | 2020-12-03T02:13:52.566291 | 2017-06-30T18:42:49 | 2017-06-30T18:42:49 | 95,917,797 | 1 | 0 | null | 2017-06-30T19:25:58 | 2017-06-30T19:25:58 | null | UTF-8 | Python | false | false | 7,933 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class RecoverableDatabasesOperations(object):
    """RecoverableDatabasesOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The API version to use for the request. Constant value: "2014-04-01".
    """
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Fixed by the service contract this client was generated against.
        self.api_version = "2014-04-01"
        self.config = config
    def get(
            self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
        """Gets a Recoverable Database, which is a resource representing a
        database's Geo backup.
        :param resource_group_name: The name of the resource group that
         contains the resource. You can obtain this value from the Azure
         Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :param database_name: The name of the database
        :type database_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RecoverableDatabase
         <azure.mgmt.sql.models.RecoverableDatabase>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/recoverableDatabases/{databaseName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serverName': self._serialize.url("server_name", server_name, 'str'),
            'databaseName': self._serialize.url("database_name", database_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Per-request correlation id for Azure-side tracing.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            # Any non-200 is surfaced as a CloudError with the service's
            # request id attached for support diagnostics.
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('RecoverableDatabase', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def list_by_server(
            self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config):
        """Returns a list of Recoverable Databases.
        :param resource_group_name: The name of the resource group that
         contains the resource. You can obtain this value from the Azure
         Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RecoverableDatabasePaged
         <azure.mgmt.sql.models.RecoverableDatabasePaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Closure invoked by the paged collection: fetches one page, either
        # the initial URL or the service-provided next_link.
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/recoverableDatabases'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'serverName': self._serialize.url("server_name", server_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                # next_link already embeds all query parameters.
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response
        deserialized = models.RecoverableDatabasePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.RecoverableDatabasePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
ee2019bff22f3d1aaa540ff78f7cad8ea8ce1e34 | 461c02a8aa79654dc85a1750a29661c95f2c3939 | /src/devilry/devilry/apps/developertools/tests.py | 948fdd077cfa007c11b86aa97a2d172767148ef6 | [] | no_license | espenak/devilry-django | 13bfdc5625218a453c336f296aff6a22d18ae03f | 0e033ebf44c03d864d0457918cf221cfcc704652 | refs/heads/master | 2021-01-23T21:11:01.651800 | 2013-08-05T13:01:23 | 2013-08-05T13:01:23 | 1,915,118 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | import unittest
from .management.commands.dev_build_extjs import SimpleJsFile
class TestJsFile(unittest.TestCase):
def testMultilineCommentPattern(self):
self.assertEquals(SimpleJsFile.MULTILINE_COMMENT_PATT.sub('', 'hello /**some \n\n comments \n\t here*/world'),
'hello world')
def testSingleLineCommentPattern(self):
self.assertEquals(SimpleJsFile.SINGLELINE_COMMENT_PATT.sub('', 'hello // this is a test\nworld'),
'hello \nworld')
| [
"post@espenak.net"
] | post@espenak.net |
b1210e5ee7ae62fa2c2d7ee61167ac2dc2fc80cf | 2f55769e4d6bc71bb8ca29399d3809b6d368cf28 | /Miniconda2/Lib/site-packages/sklearn/neighbors/setup.py | 92a2feb67ef0ea142e80503de7cee7e03cb2ba54 | [] | no_license | jian9695/GSV2SVF | e5ec08b2d37dbc64a461449f73eb7388de8ef233 | 6ed92dac13ea13dfca80f2c0336ea7006a6fce87 | refs/heads/master | 2023-03-02T03:35:17.033360 | 2023-02-27T02:01:48 | 2023-02-27T02:01:48 | 199,570,103 | 9 | 16 | null | 2022-10-28T14:31:05 | 2019-07-30T03:47:41 | Python | UTF-8 | Python | false | false | 1,300 | py | import os
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for sklearn.neighbors."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('neighbors', parent_package, top_path)

    # libm is only needed (and available) on POSIX systems.
    libraries = ['m'] if os.name == 'posix' else []

    numpy_include = numpy.get_include()
    # (extension name, include dirs) — dist_metrics additionally needs
    # numpy's private C headers under <include>/numpy.
    extension_specs = [
        ('ball_tree', [numpy_include]),
        ('kd_tree', [numpy_include]),
        ('dist_metrics', [numpy_include,
                          os.path.join(numpy_include, 'numpy')]),
        ('typedefs', [numpy_include]),
    ]
    for ext_name, include_dirs in extension_specs:
        config.add_extension(ext_name,
                             sources=['%s.pyx' % ext_name],
                             include_dirs=include_dirs,
                             libraries=libraries)

    config.add_subpackage('tests')

    return config
| [
"JLiang@esri.com"
] | JLiang@esri.com |
4de48aa71946a71c785722b993d89154566f6338 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/3Numbers_20200713211516.py | 06e4e32ef9e6d865b79ca7c7b17a2315e39b906f | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | def three(str):
strings = str.split('')
print(strings)
three("2hell6o3 wor6l7d2") | [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
b571ec9c876da3b3588aa9f93136372d8a9b9408 | af6b9c19678e6e4f28804aab27cf19e185623efe | /atlcore/doc/__init__.py | 90d6b3484e1906cb7de6c41b683c7c8ea90bea91 | [] | no_license | atlantesoftware/atlcore | 1d265c66c2e541e3234f43ae62cd469cc513308d | f927ac5a576e1c9f1c3a2f68296dab0c41cd093a | refs/heads/master | 2016-09-06T16:59:13.869912 | 2013-05-07T14:49:05 | 2013-05-07T14:49:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | #coding=UTF-8
from atlcore.doc.site import DocSite
# Module-level singleton: the documentation site object exposed by this package.
docsite = DocSite('docsite', 'DocSite')
"hailem@atlantesoftware.com"
] | hailem@atlantesoftware.com |
102e52e69cd85959fcc145cd9158976a012b9b5a | 40132307c631dccbf7aa341eb308f69389715c73 | /OLD/idmt/maya/ZoomWhiteDolphin/zm_batchrender.py | 202ca2a0e7840da7848d6ffed9d438ffb477b17d | [] | no_license | Bn-com/myProj_octv | be77613cebc450b1fd6487a6d7bac991e3388d3f | c11f715996a435396c28ffb4c20f11f8e3c1a681 | refs/heads/master | 2023-03-25T08:58:58.609869 | 2021-03-23T11:17:13 | 2021-03-23T11:17:13 | 348,676,742 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,607 | py | # -*- coding: utf-8 -*-
'''
Created on 2013-8-5
@author: liangyu
'''
import maya.cmds as mc
import maya.mel as mel
def zm_batchFX():
    """Set up the 'fx_waveBeach' render layer for the current shot.

    Imports the baked shot camera and the matching waveBeach FX scene,
    creates the render layer, applies the project render settings, and
    returns to the master layer. Intended to run inside Maya (mc/mel).
    """
    # Import the baked shot camera (replacing any existing copy).
    from idmt.maya.py_common import sk_infoConfig
    reload(sk_infoConfig)
    shotInfo = sk_infoConfig.sk_infoConfig().checkShotInfo()
    camFile = '//file-cluster/GDC/Projects/ZoomWhiteDolphin/Project/scenes/Animation/episode_'+shotInfo[1]+'/episode_camera/zm_'+shotInfo[1]+'_'+shotInfo[2]+'_cam.ma'
    cam='cam_'+shotInfo[1]+'_'+shotInfo[2]+'_baked'
    if mc.ls(cam):
        mc.delete(cam)
    try:
        mc.file(camFile, i=1)
    except:
        # Error text (kept in Chinese): "make sure a _bake camera exists".
        print u'请确保有_bake相机'
        mc.error(u'请确保有_bake相机')
    # Import the waveBeach FX scene matching this shot's set name.
    from idmt.maya.ZoomWhiteDolphin import zm_AutoRenderLayer_ZoomWhiteDolphin
    reload(zm_AutoRenderLayer_ZoomWhiteDolphin)
    shot_info=zm_AutoRenderLayer_ZoomWhiteDolphin.zmRLConfig().ReadEXcle()
    shot_sence=shot_info[4]
    filepath='Z:/Projects/ZoomWhiteDolphin/ZoomWhiteDolphin_Scratch/VFX/waveBeachAutoRender/WavesBeach/'
    sign=''
    fileslist=mc.getFileList(folder= filepath)
    if fileslist:
        # NOTE(review): the loop variable shadows the builtin `list`.
        for list in fileslist:
            if list.split(".")[0]==shot_sence:
                sign=list.split(".")[0]
    if mc.ls('waveBeach'):
        mc.delete('waveBeach')
    if sign:
        objFile='Z:/Projects/ZoomWhiteDolphin/ZoomWhiteDolphin_Scratch/VFX/waveBeachAutoRender/WavesBeach/'+shot_sence+'.mb'
        mc.file(objFile, i=1)
    else:
        # "Confirm the folder contains a scene file for this shot."
        print u'请确认文件夹下有相应镜头的场景文件'
    # Create the render layer from the imported waveBeach nodes.
    layerName = 'fx_waveBeach'
    rlObjs=mc.ls('waveBeach')
    print rlObjs
    if mc.ls(layerName):
        mc.delete(layerName)
    if rlObjs:
        mc.createRenderLayer(rlObjs, name=layerName, noRecurse=1, makeCurrent=1)
    else:
        # "No objects to render."
        print u'没有渲染物体'
    # Apply common render settings and make the baked camera renderable.
    zm_AutoRenderLayer_ZoomWhiteDolphin.zmRLConfig().zmRLCommonConfig()
    zm_AutoRenderLayer_ZoomWhiteDolphin.zmRLConfig().mentalRayProductionLevel()
    camShape = mc.listRelatives(mc.ls(cam, type='transform')[0], ni=1, s=1)[0]
    if mc.ls(cam, type='transform'):
        mc.setAttr((camShape + '.renderable'), 1)
    try:
        mc.setAttr(('perspShape.renderable'), 0)
    except:
        pass
    # Switch back to the MASTER layer and disable it for rendering.
    mel.eval('editRenderLayerGlobals -currentRenderLayer "defaultRenderLayer"')
    mc.setAttr("defaultRenderLayer.renderable", 0)
def zmbatchFXwautoCreate():
    """Run zm_batchFX, save the scene under the render naming convention,
    and check it in to the Muster farm. Intended to run inside Maya."""
    print ('=================================================================')
    print '====================!!!Start AutoRenderLayer!!!===================='
    from idmt.maya.ZoomWhiteDolphin import zm_batchrender
    reload(zm_batchrender)
    zm_batchrender.zm_batchFX()
    # save
    from idmt.maya.py_common import sk_infoConfig
    reload(sk_infoConfig)
    shotInfo = sk_infoConfig.sk_infoConfig().checkShotInfo()
    pathLocal = sk_infoConfig.sk_infoConfig().checkRenderLayerLocalPath()
    # File name convention: <ep>_<seq>_<shot>_render_ef_c001.mb
    fileName = pathLocal + shotInfo[0] + '_' + shotInfo[1] + '_' + shotInfo[2]
    fileType = '_render_ef_c001.mb'
    fileName = fileName + fileType
    mc.file(rename=fileName)
    mc.file(save=1)
    # Submit/check-in via the studio's Muster integration.
    mel.eval('zwMusterCheckin2 "" "" 0 0 0;')
    print '=======================!!!All Done!!!======================='
    print ('===========================================================')
| [
"snakelonely@outlook.com"
] | snakelonely@outlook.com |
594f066131d1f193284788d929b0b60281ba0d9e | 54020c34ed23a086e2bfbfb874a400f3a11293cb | /ST_vars_and_ic.py | 4761d35359cfcd43ec36389e847eb4c71dc719e3 | [] | no_license | ModelDBRepository/187605 | bd26e6b9af407f157a1d9baae42a9d4fe6157a20 | 739605333f3768f9684b217f52a375e6ddd32bb1 | refs/heads/master | 2020-05-29T18:27:58.771998 | 2019-05-31T04:56:54 | 2019-05-31T04:56:54 | 189,300,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,243 | py | # -*- coding: utf-8 -*-
# (c) 2016 - Ilya Prokin - isprokin@gmail.com - https://sites.google.com/site/ilyaprokin
# INRIA Rhone-Alpes
# STDP model : variables' names and initial values
import numpy as np
# State variable names mapped to their initial conditions for the STDP model.
ST_vars_and_ic = {
    'h_caL13': 0.99182137906713796,
    'm_caL13': 0.0036977671871038817,
    'o_AMPA': 0.0,
    'd_AMPA': 0.0,
    'o_NMDA': 0.0,
    'h_CICR': 0.82466766689469506,
    'Ca_cyt': 0.12132718966407073,
    'Ca_ER': 63.348087686853646,
    'IP3': 0.057291400446753571,
    'DAG': 0.005734867663641929,
    'DAGLP': 4.1969621599776083e-07,
    'twoAG': 3.2085896623941232e-06,
    'AEA': 0.0061033848099783438,
    'fpre': 1.0,
    'I1P': 0.042380592866431144,
    'PP1': 0.00093939509311232795,
    'V': -69.999016204528218,
    'o_CB1R': 3.4373437854140236e-07,
    'd_CB1R': 0.002994487796947427
}
# Total number of equations: the 13 CaMKII states come first in the state
# vector, followed by the variables above.
NEQ = len(ST_vars_and_ic)+13
# Initial conditions for the 13 CaMKII phosphorylation states.
CaMKII_ic=np.array([0.23316029213700182, 0.0034298074889746086, 0.00028889779878196254, 0.00013756133483052541, 3.6365976788029681e-05, 4.1274017451676494e-06, 4.2498580055485264e-06, 1.2513942987290664e-07, 3.2696082960591099e-07, 4.5484170099234244e-08, 3.078127923587743e-08, 2.7970211543431621e-09, 1.3221817318283754e-11])
# Map a variable name to its index in the full state vector (offset by the 13
# CaMKII states). NOTE: the key list is captured once at definition time as a
# default argument, so the ordering is frozen here — consistent within a run.
si = lambda x, SK=ST_vars_and_ic.keys(): 13+SK.index(x)
| [
"tom.morse@yale.edu"
] | tom.morse@yale.edu |
a9992f4f561abbeb7842d4945ff18b0ff57b5493 | 0b8575873d696562c8279c9361433d38626bc6cc | /qa/rpc-tests/python-bogcoinrpc/bogcoinrpc/authproxy.py | e686f50b95493df4ad6b8326839167e87f4e7056 | [
"MIT"
] | permissive | colombiacoin/newbogcoin | 1be9b8f432a1281457b33a0313cef8f0127c3ea8 | d36069738e7e514b3f6d8c5b58bd4692b70b1a2e | refs/heads/master | 2020-07-24T15:27:04.311029 | 2019-09-13T12:16:24 | 2019-09-13T12:16:24 | 207,967,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,784 | py |
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("BogcoinRPC")
class JSONRPCException(Exception):
    """Raised when the JSON-RPC server returns an error object."""

    def __init__(self, rpc_error):
        """Keep the raw error object from the server on ``self.error``."""
        super(JSONRPCException, self).__init__()
        self.error = rpc_error
def EncodeDecimal(o):
    """json.dumps ``default`` hook: render Decimals rounded to 8 places."""
    if not isinstance(o, decimal.Decimal):
        raise TypeError(repr(o) + " is not JSON serializable")
    return round(o, 8)
class AuthServiceProxy(object):
    """JSON-RPC proxy with HTTP Basic auth and a persistent connection.

    Attribute access returns nested proxies, so ``proxy.getinfo()`` issues
    the RPC method ``getinfo`` and ``proxy.a.b()`` issues ``a.b``.
    """
    # Class-wide counter used as the JSON-RPC request 'id'.
    __id_count = 0
    def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
        self.__service_url = service_url
        self.__service_name = service_name
        self.__url = urlparse.urlparse(service_url)
        if self.__url.port is None:
            port = 80
        else:
            port = self.__url.port
        (user, passwd) = (self.__url.username, self.__url.password)
        # On Python 3 username/password are str and need encoding; on
        # Python 2 (or when absent) encode() is missing -> AttributeError.
        try:
            user = user.encode('utf8')
        except AttributeError:
            pass
        try:
            passwd = passwd.encode('utf8')
        except AttributeError:
            pass
        authpair = user + b':' + passwd
        # Pre-computed HTTP Basic Authorization header value.
        self.__auth_header = b'Basic ' + base64.b64encode(authpair)
        if connection:
            # Callables re-use the connection of the original proxy
            self.__conn = connection
        elif self.__url.scheme == 'https':
            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
                                                  None, None, False,
                                                  timeout)
        else:
            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
                                                 False, timeout)
    def __getattr__(self, name):
        if name.startswith('__') and name.endswith('__'):
            # Python internal stuff
            raise AttributeError
        if self.__service_name is not None:
            # Build dotted method names (e.g. 'a.b') for nested access.
            name = "%s.%s" % (self.__service_name, name)
        return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
    def __call__(self, *args):
        AuthServiceProxy.__id_count += 1
        log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name,
                                 json.dumps(args, default=EncodeDecimal)))
        # JSON-RPC 1.1-style envelope with an incrementing id.
        postdata = json.dumps({'version': '1.1',
                               'method': self.__service_name,
                               'params': args,
                               'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
        self.__conn.request('POST', self.__url.path, postdata,
                            {'Host': self.__url.hostname,
                             'User-Agent': USER_AGENT,
                             'Authorization': self.__auth_header,
                             'Content-type': 'application/json'})
        response = self._get_response()
        if response['error'] is not None:
            raise JSONRPCException(response['error'])
        elif 'result' not in response:
            raise JSONRPCException({
                'code': -343, 'message': 'missing JSON-RPC result'})
        else:
            return response['result']
    def _batch(self, rpc_call_list):
        """POST a pre-built list of JSON-RPC calls in a single request."""
        postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
        log.debug("--> "+postdata)
        self.__conn.request('POST', self.__url.path, postdata,
                            {'Host': self.__url.hostname,
                             'User-Agent': USER_AGENT,
                             'Authorization': self.__auth_header,
                             'Content-type': 'application/json'})
        return self._get_response()
    def _get_response(self):
        """Read and decode the pending HTTP response as JSON.

        Floats are parsed as Decimal to avoid binary-float rounding of
        monetary amounts.
        """
        http_response = self.__conn.getresponse()
        if http_response is None:
            raise JSONRPCException({
                'code': -342, 'message': 'missing HTTP response from server'})
        responsedata = http_response.read().decode('utf8')
        response = json.loads(responsedata, parse_float=decimal.Decimal)
        if "error" in response and response["error"] is None:
            log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal)))
        else:
            log.debug("<-- "+responsedata)
        return response
| [
"rishabhshukla@opulasoft.com"
] | rishabhshukla@opulasoft.com |
45202ab545dae9546344b844321a47b93b3fe7ab | 5bc369d49b16bc46e23b76621144223dc4226997 | /model/valueset.py | 204d668f16ce93ac8d577d91ca0b5aedbefd6d55 | [
"MIT"
] | permissive | beda-software/fhir-py-experements | 90d8e802f92f9e691d47d6ea4b33fda47957383a | 363cfb894fa6f971b9be19340cae1b0a3a4377d8 | refs/heads/master | 2022-12-17T05:19:59.294901 | 2020-02-26T03:54:13 | 2020-02-26T03:54:13 | 241,292,789 | 0 | 0 | MIT | 2022-12-08T03:38:55 | 2020-02-18T06:53:02 | Python | UTF-8 | Python | false | false | 6,429 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.1-9346c8cc45 (http://hl7.org/fhir/StructureDefinition/ValueSet) on 2020-02-03.
# 2020, SMART Health IT.
import sys
from dataclasses import dataclass, field
from typing import ClassVar, Optional, List
from .backboneelement import BackboneElement
from .codeableconcept import CodeableConcept
from .coding import Coding
from .contactdetail import ContactDetail
from .domainresource import DomainResource
from .fhirdate import FHIRDate
from .identifier import Identifier
from .usagecontext import UsageContext
@dataclass
class ValueSetExpansionParameter(BackboneElement):
    """ Parameter that controlled the expansion process.

    A parameter that controlled the expansion process. These parameters may be
    used by users of expanded value sets to check whether the expansion is
    suitable for a particular purpose, or to pick the correct expansion.
    """
    resource_type: ClassVar[str] = "ValueSetExpansionParameter"

    name: str = None
    # FHIR choice type value[x]: at most one of the value* fields below is
    # populated (enforced via the shared 'value' one_of_many metadata key).
    valueString: Optional[str] = field(default=None, metadata=dict(one_of_many='value',))
    valueBoolean: Optional[bool] = field(default=None, metadata=dict(one_of_many='value',))
    valueInteger: Optional[int] = field(default=None, metadata=dict(one_of_many='value',))
    valueDecimal: Optional[float] = field(default=None, metadata=dict(one_of_many='value',))
    valueUri: Optional[str] = field(default=None, metadata=dict(one_of_many='value',))
    valueCode: Optional[str] = field(default=None, metadata=dict(one_of_many='value',))
    valueDateTime: Optional[FHIRDate] = field(default=None, metadata=dict(one_of_many='value',))
@dataclass
class ValueSetExpansionContains(BackboneElement):
    """ Codes in the value set.

    The codes that are contained in the value set expansion. Entries may nest
    recursively via ``contains``.
    """
    resource_type: ClassVar[str] = "ValueSetExpansionContains"

    system: Optional[str] = None
    abstract: Optional[bool] = None
    inactive: Optional[bool] = None
    version: Optional[str] = None
    code: Optional[str] = None
    display: Optional[str] = None
    designation: Optional[List["ValueSetComposeIncludeConceptDesignation"]] = None
    contains: Optional[List["ValueSetExpansionContains"]] = None
@dataclass
class ValueSetComposeIncludeConceptDesignation(BackboneElement):
    """ Additional representations for this concept.

    Additional representations for this concept when used in this value set -
    other languages, aliases, specialized purposes, used for particular
    purposes, etc.
    """
    resource_type: ClassVar[str] = "ValueSetComposeIncludeConceptDesignation"

    language: Optional[str] = None
    use: Optional[Coding] = None
    # 'value' is the only required element of a designation.
    value: str = None
@dataclass
class ValueSetComposeIncludeConcept(BackboneElement):
    """ A concept defined in the system.

    Specifies a concept to be included or excluded.
    """
    resource_type: ClassVar[str] = "ValueSetComposeIncludeConcept"

    code: str = None
    display: Optional[str] = None
    designation: Optional[List[ValueSetComposeIncludeConceptDesignation]] = None
@dataclass
class ValueSetComposeIncludeFilter(BackboneElement):
    """ Select codes/concepts by their properties (including relationships).

    Select concepts by specify a matching criterion based on the properties
    (including relationships) defined by the system, or on filters defined by
    the system. If multiple filters are specified, they SHALL all be true.
    """
    resource_type: ClassVar[str] = "ValueSetComposeIncludeFilter"

    # A filter is the triple (property, op, value), all required.
    property: str = None
    op: str = None
    value: str = None
@dataclass
class ValueSetComposeInclude(BackboneElement):
    """ Include one or more codes from a code system or other value set(s).

    Codes may be selected by explicit concept list, by filter, or by
    referencing other value sets via ``valueSet``.
    """
    resource_type: ClassVar[str] = "ValueSetComposeInclude"

    system: Optional[str] = None
    version: Optional[str] = None
    concept: Optional[List[ValueSetComposeIncludeConcept]] = None
    filter: Optional[List[ValueSetComposeIncludeFilter]] = None
    valueSet: Optional[List[str]] = None
@dataclass
class ValueSetCompose(BackboneElement):
    """ Content logical definition of the value set (CLD).

    A set of criteria that define the contents of the value set by including or
    excluding codes selected from the specified code system(s) that the value
    set draws from. This is also known as the Content Logical Definition (CLD).
    """
    resource_type: ClassVar[str] = "ValueSetCompose"

    lockedDate: Optional[FHIRDate] = None
    inactive: Optional[bool] = None
    # At least one 'include' is required by the FHIR specification.
    include: List[ValueSetComposeInclude] = field(default_factory=list)
    exclude: Optional[List[ValueSetComposeInclude]] = None
@dataclass
class ValueSetExpansion(BackboneElement):
    """ Used when the value set is "expanded".

    A value set can also be "expanded", where the value set is turned into a
    simple collection of enumerated codes. This element holds the expansion, if
    it has been performed.
    """
    resource_type: ClassVar[str] = "ValueSetExpansion"

    identifier: Optional[str] = None
    # 'timestamp' is the only required element of an expansion.
    timestamp: FHIRDate = None
    total: Optional[int] = None
    offset: Optional[int] = None
    parameter: Optional[List[ValueSetExpansionParameter]] = None
    contains: Optional[List[ValueSetExpansionContains]] = None
@dataclass
class ValueSet(DomainResource):
    """ A set of codes drawn from one or more code systems.

    A ValueSet resource instance specifies a set of codes drawn from one or
    more code systems, intended for use in a particular context. Value sets
    link between [CodeSystem](codesystem.html) definitions and their use in
    [coded elements](terminologies.html).
    """
    resource_type: ClassVar[str] = "ValueSet"

    url: Optional[str] = None
    identifier: Optional[List[Identifier]] = None
    version: Optional[str] = None
    name: Optional[str] = None
    title: Optional[str] = None
    # 'status' is the only required element (draft | active | retired | unknown).
    status: str = None
    experimental: Optional[bool] = None
    date: Optional[FHIRDate] = None
    publisher: Optional[str] = None
    contact: Optional[List[ContactDetail]] = None
    description: Optional[str] = None
    useContext: Optional[List[UsageContext]] = None
    jurisdiction: Optional[List[CodeableConcept]] = None
    immutable: Optional[bool] = None
    purpose: Optional[str] = None
    copyright: Optional[str] = None
    compose: Optional[ValueSetCompose] = None
expansion: Optional[ValueSetExpansion] = None | [
"ir4y.ix@gmail.com"
] | ir4y.ix@gmail.com |
f904c5c7289553d7bb7d0f03975aa6101230c77f | bfbe642d689b5595fc7a8e8ae97462c863ba267a | /bin/Python27/Lib/site-packages/scipy-0.14.0-py2.7-win32.egg/scipy/sparse/linalg/isolve/tests/test_lsmr.py | 519d4498fffe9e1262b2432c530da07a2eddc333 | [
"MIT",
"LicenseRef-scancode-other-permissive"
] | permissive | mcanthony/meta-core | 0c0a8cde1669f749a4880aca6f816d28742a9c68 | 3844cce391c1e6be053572810bad2b8405a9839b | refs/heads/master | 2020-12-26T03:11:11.338182 | 2015-11-04T22:58:13 | 2015-11-04T22:58:13 | 45,806,011 | 1 | 0 | null | 2015-11-09T00:34:22 | 2015-11-09T00:34:22 | null | UTF-8 | Python | false | false | 4,754 | py | """
Copyright (C) 2010 David Fong and Michael Saunders
Distributed under the same license as Scipy
Testing Code for LSMR.
03 Jun 2010: First version release with lsmr.py
David Chin-lung Fong clfong@stanford.edu
Institute for Computational and Mathematical Engineering
Stanford University
Michael Saunders saunders@stanford.edu
Systems Optimization Laboratory
Dept of MS&E, Stanford University.
"""
from __future__ import division, print_function, absolute_import
from numpy import arange, eye, zeros, ones, sqrt, transpose, hstack
from numpy.linalg import norm
from numpy.testing import run_module_suite, assert_almost_equal
from scipy.sparse import coo_matrix
from scipy.sparse.linalg.interface import aslinearoperator
from scipy.sparse.linalg import lsmr
class TestLSMR:
def setUp(self):
self.n = 10
self.m = 10
def assertCompatibleSystem(self, A, xtrue):
Afun = aslinearoperator(A)
b = Afun.matvec(xtrue)
x = lsmr(A, b)[0]
assert_almost_equal(norm(x - xtrue), 0, decimal=5)
def testIdentityACase1(self):
A = eye(self.n)
xtrue = zeros((self.n, 1))
self.assertCompatibleSystem(A, xtrue)
def testIdentityACase2(self):
A = eye(self.n)
xtrue = ones((self.n,1))
self.assertCompatibleSystem(A, xtrue)
def testIdentityACase3(self):
A = eye(self.n)
xtrue = transpose(arange(self.n,0,-1))
self.assertCompatibleSystem(A, xtrue)
def testBidiagonalA(self):
A = lowerBidiagonalMatrix(20,self.n)
xtrue = transpose(arange(self.n,0,-1))
self.assertCompatibleSystem(A,xtrue)
class TestLSMRReturns:
    """Checks the diagnostic values lsmr returns alongside the solution."""

    def setUp(self):
        self.n = 10
        self.A = lowerBidiagonalMatrix(20, self.n)
        self.xtrue = transpose(arange(self.n, 0, -1))
        self.Afun = aslinearoperator(self.A)
        self.b = self.Afun.matvec(self.xtrue)
        # (x, istop, itn, normr, normar, normA, condA, normx)
        self.returnValues = lsmr(self.A, self.b)

    def testNormr(self):
        x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
        # Reported residual norm must match the true ||b - A x||.
        residual = self.b - self.Afun.matvec(x)
        assert_almost_equal(normr, norm(residual))

    def testNormar(self):
        x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
        # Reported normal-equations residual must match ||A^T (b - A x)||.
        residual = self.b - self.Afun.matvec(x)
        assert_almost_equal(normar, norm(self.Afun.rmatvec(residual)))

    def testNormx(self):
        x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
        # Reported solution norm must match ||x||.
        assert_almost_equal(normx, norm(x))
def lowerBidiagonalMatrix(m, n):
    """Return the leading m-by-n part of a lower-bidiagonal test matrix.

    The full pattern is
        [ 1
          1 2
            2 3
              3 4
                ...
                  n ]
    i.e. diagonal entries 1, 2, ... with each value repeated once on the
    subdiagonal below it, suitably padded by zeros to the requested shape.

    04 Jun 2010: First version for distribution with lsmr.py
    """
    if m <= n:
        # m diagonal entries (rows 0..m-1) and m-1 subdiagonal entries fit.
        row = hstack((arange(m, dtype=int),
                      arange(1, m, dtype=int)))
        col = hstack((arange(m, dtype=int),
                      arange(m - 1, dtype=int)))
        val = hstack((arange(1, m + 1, dtype=float),
                      arange(1, m, dtype=float)))
    else:
        # Only n columns: n diagonal entries plus n subdiagonal entries
        # (rows 1..n); remaining rows are all zero.
        row = hstack((arange(n, dtype=int),
                      arange(1, n + 1, dtype=int)))
        col = hstack((arange(n, dtype=int),
                      arange(n, dtype=int)))
        val = hstack((arange(1, n + 1, dtype=float),
                      arange(1, n + 1, dtype=float)))
    return coo_matrix((val, (row, col)), shape=(m, n))
def lsmrtest(m, n, damp):
    """Verbose testing of lsmr.

    Builds the m-by-n lower-bidiagonal test problem with the known solution
    x = (n, n-1, ..., 1), runs lsmr with tight tolerances, and prints the
    head/tail of the computed solution plus the estimated and true
    (damped) residual norms.
    """
    A = lowerBidiagonalMatrix(m, n)
    xtrue = arange(n, 0, -1, dtype=float)
    Afun = aslinearoperator(A)
    b = Afun.matvec(xtrue)

    atol = 1.0e-7
    btol = 1.0e-7
    conlim = 1.0e+10
    itnlim = 10 * n
    show = 1

    x, istop, itn, normr, normar, norma, conda, normx \
        = lsmr(A, b, damp, atol, btol, conlim, itnlim, show)

    j1 = min(n, 5)
    j2 = max(n - 4, 1)
    print(' ')
    print('First elements of x:')
    # Renamed from `str` to avoid shadowing the builtin (bug fix).
    head = ['%10.4f' % (xi) for xi in x[0:j1]]
    print(''.join(head))
    print(' ')
    print('Last elements of x:')
    tail = ['%10.4f' % (xi) for xi in x[j2 - 1:]]
    print(''.join(tail))

    # True damped residual norm: sqrt(||b - A x||^2 + (damp ||x||)^2),
    # to compare against lsmr's internal estimate `normr`.
    r = b - Afun.matvec(x)
    r2 = sqrt(norm(r) ** 2 + (damp * norm(x)) ** 2)
    print(' ')
    est_line = 'normr (est.) %17.10e' % (normr)
    true_line = 'normr (true) %17.10e' % (r2)
    print(est_line)
    print(true_line)
    print(' ')
if __name__ == "__main__":
    # Run the verbose demo first, then the unit tests.
    # Comment out the next line to run unit tests only.
    lsmrtest(20,10,0)
    run_module_suite()
| [
"kevin.m.smyth@gmail.com"
] | kevin.m.smyth@gmail.com |
ba29ed82314cae6ed3ee61c684bfe1f5a68b82f6 | 5ca5a7120c3c147b3ae86c2271c60c82745997ea | /my_selenium/web_driver_three/data_frame/pageObjects/LoginPage.py | ba2b5ecb7dc39b416962ddc7f40bc3d108534d64 | [] | no_license | JR1QQ4/auto_test | 6b9ea7bd317fd4338ac0964ffd4042b293640af3 | 264b991b4dad72986e2aeb1a30812baf74e42bc6 | refs/heads/main | 2023-03-21T01:32:29.192030 | 2021-03-16T14:07:11 | 2021-03-16T14:07:11 | 321,591,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,437 | py | #!/usr/bin/python
# -*- coding:utf-8 -*-
from my_selenium.web_driver_three.data_frame.util.ObjectMap import *
from my_selenium.web_driver_three.data_frame.util.parse_configuration_file import ParseConfigFile
from selenium import webdriver
class LoginPage(object):
    """Page object for the 163 mail login page.

    Element locators are read from the "163mail_login" section of the
    project configuration file; each value has the form
    "<locate_type>><locator>", e.g. 'xpath>//input[@name="email"]'.
    """

    def __init__(self, driver: webdriver):
        self.driver = driver
        self.parseCF = ParseConfigFile()
        # Mapping of lower-cased option names -> "type>locator" strings.
        self.loginOPtions = self.parseCF.getItemsSection("163mail_login")
        # print(self.loginOPtions)

    def _findByOption(self, optionName):
        """Resolve a configured locator and return the matching element.

        Replaces the previously triplicated lookup code; splits on the
        first '>' only, so locators may themselves contain '>'
        (e.g. CSS child combinators).
        """
        locate_type, locator = self.loginOPtions[optionName.lower()].split(">", 1)
        return get_element(self.driver, locate_type, locator)

    def switchToFrame(self):
        """Switch into the login iframe that hosts the credential inputs."""
        # e.g. 'css selector>iframe[id^="x-URS-iframe"]'
        frame = self._findByOption("loginPage.frame")
        self.driver.switch_to.frame(frame)

    def switchToDefaultFrame(self):
        """Switch back out of the login iframe to the top document."""
        self.driver.switch_to.default_content()

    def userNameObj(self):
        """Return the username input element."""
        return self._findByOption("loginPage.username")

    def passwordObj(self):
        """Return the password input element."""
        return self._findByOption("loginPage.password")

    def loginButton(self):
        """Return the login submit button element."""
        return self._findByOption("loginPage.loginButton")
# if __name__ == '__main__':
# from selenium import webdriver
# driver = webdriver.Chrome(executable_path=r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe')
# driver.get("https://mail.163.com/")
# import time
# time.sleep(5)
# login = LoginPage(driver)
# login.switchToFrame()
# login.userNameObj().send_keys("")
# login.passwordObj().send_keys("")
# login.loginButton().click()
# login.switchToDefaultFrame()
# time.sleep(5)
# driver.quit()
| [
"chenjunrenyx@163.com"
] | chenjunrenyx@163.com |
20234811a48b9100c4a4d892de7cbdf8b671601c | fc3f784c8d00f419b11cbde660fe68a91fb080ca | /algoritm/20상반기 코딩테스트/네트워크연결/bj3780.py | 81a9c28d91d9f9bda43b57b0f9ea4cab4770f1c5 | [] | no_license | choo0618/TIL | 09f09c89c8141ba75bf92657ac39978913703637 | 70437a58015aecee8f3d86e6bfd0aa8dc11b5447 | refs/heads/master | 2021-06-25T07:01:34.246642 | 2020-12-21T04:57:13 | 2020-12-21T04:57:13 | 163,782,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | import sys
sys.stdin = open('bj3780.txt','r')
def find(x):
    # Weighted union-find lookup over the module-level parent array P and
    # distance array Dis: returns the accumulated edge length from x up to
    # the root of its set, moving x's parent pointer one level up
    # (path halving) as a side effect.
    if P[x]==x:return Dis[x]
    # Recursion first fully resolves the parent's distance-to-root,
    # then folds it into x's distance. Order matters here.
    Dis[x]+=find(P[x])
    P[x]=P[P[x]]
    # NOTE(review): after this, Dis[x] is the full distance to the root but
    # P[x] points at the (pre-compression) grandparent, whose Dis is its own
    # distance to the root -- a second find(x) appears to double-count.
    # Confirm against the intended query semantics of the problem.
    return Dis[x]
# Driver: T test cases; each maintains a forest of N nodes where
# 'E x' queries the accumulated line length from x to its root and
# 'I a b' attaches a to b with edge cost |a-b| mod 1000.
# Any other single-token line (the terminator) ends the test case.
T=int(input())
for t in range(T):
    N=int(input())
    # P: parent pointers (each node initially its own root);
    # Dis: distance from node to its current parent (lazily accumulated).
    P=list(range(N+1))
    Dis=[0]*(N+1)
    while True:
        L=list(map(str,input().split()))
        if len(L)==1:break
        if L[0]=='E':print(find(int(L[1])))
        else:
            a,b=int(L[1]),int(L[2])
            Dis[a]=abs(a-b)%1000
            P[a]=b
| [
"choo0618@naver.com"
] | choo0618@naver.com |
78b0a50ab9ca8a2b3bdda54f39c9760a8e3ba0f5 | cda2c95ee6167a34ce9ba3ea25707469a2c357ca | /neural_augmented_simulator/old-code/data-collection/3-collect_mujoco_episodes_cheetah.py | 4496daf7cbcffb9a7fc72bdaaf3d5687de2c0805 | [] | no_license | fgolemo/neural-augmented-simulator | f43d9a88187fbef478aba9b4399eaa59d8795746 | eb02f20d92e6775824dbac221771f8b8c6dda582 | refs/heads/master | 2020-06-23T15:50:35.957495 | 2020-03-06T21:16:34 | 2020-03-06T21:16:34 | 198,666,041 | 0 | 1 | null | 2019-12-04T22:19:27 | 2019-07-24T15:49:48 | Jupyter Notebook | UTF-8 | Python | false | false | 5,037 | py | #import torch
import math
import h5py
from fuel.datasets.hdf5 import H5PYDataset
import gym
import gym_throwandpush
import numpy as np
from scipy.misc import imresize
from utils.buffer_images import BufferImages as Buffer
import matplotlib.pyplot as plt
from tqdm import tqdm
from hyperdash import Experiment
# Two copies of the same environment: `env` plays the (mismatched)
# "simulator", `env2` plays the "real robot".
env = gym.make('HalfCheetah2Pixel-v0')
env2 = gym.make('HalfCheetah2Pixel-v0')
env2.env.env._init(  # real robot
    torques={
        "bthigh": 120,
        "bshin": 90,
        "bfoot": 60,
        "fthigh": 120,
        "fshin": 60,
        "ffoot": 30
    },
    colored=False
)
env.env.env._init(  # simulator
    # NOTE(review): these gains differ strongly from the "real" ones above;
    # presumably the mismatch is deliberate (sim-to-real gap) -- confirm the
    # exact values are intended.
    torques={
        "bthigh": 600,
        "bshin": 18,
        "bfoot": 300,
        "fthigh": 24,
        "fshin": 300,
        "ffoot": 6
    },
    colored=True
)

image_dim = (128, 128, 3)
observation_dim = int(env.observation_space[0].shape[0])
action_dim = int(env.action_space.shape[0])
print ("obs dim: {}, act dim: {}".format(observation_dim, action_dim))

rng = np.random.RandomState(seed=22)

max_steps = 1000       # number of episodes to record
episode_length = 300   # steps per episode
split = 0.90           # train fraction of the episodes
action_steps = 5       # hold each sampled action for this many steps

# Creating the h5 dataset
name = '/Tmp/mujoco_data1_cheetah.h5'
assert 0 < split <= 1
size_train = math.floor(max_steps * split)
size_val = math.ceil(max_steps * (1 - split))
f = h5py.File(name, mode='w')
# Per-(episode, step) datasets: s_* hold simulator transitions, r_* hold
# real-robot transitions for the same state/action pair.
images = f.create_dataset('images', (size_train+size_val, episode_length) + image_dim, dtype='uint8')
observations = f.create_dataset('obs', (size_train+size_val, episode_length, observation_dim), dtype='float32')
actions = f.create_dataset('actions', (size_train+size_val, episode_length, action_dim), dtype='float32')
s_transition_img = f.create_dataset('s_transition_img', (size_train+size_val, episode_length) + image_dim, dtype='uint8')
r_transition_img = f.create_dataset('r_transition_img', (size_train+size_val, episode_length) + image_dim, dtype='uint8')
s_transition_obs = f.create_dataset('s_transition_obs', (size_train+size_val, episode_length, observation_dim), dtype='float32')
r_transition_obs = f.create_dataset('r_transition_obs', (size_train+size_val, episode_length, observation_dim), dtype='float32')
reward_sim = f.create_dataset('reward_sim', (size_train+size_val,episode_length), dtype='float32')
reward_real = f.create_dataset('reward_real', (size_train+size_val,episode_length), dtype='float32')

# Fuel-style split metadata: the first `size_train` episodes form the train
# split, the remainder the validation split, for every dataset above.
split_dict = {
    'train': {
        'images': (0, size_train),
        'obs': (0, size_train),
        'actions': (0, size_train),
        's_transition_img': (0, size_train),
        'r_transition_img': (0, size_train),
        's_transition_obs': (0, size_train),
        'r_transition_obs': (0, size_train),
        'reward_sim': (0, size_train),
        'reward_real': (0, size_train)
    },
    'valid': {
        'images': (size_train, size_train+size_val),
        'obs': (size_train, size_train+size_val),
        'actions': (size_train, size_train+size_val),
        's_transition_img': (size_train, size_train+size_val),
        'r_transition_img': (size_train, size_train+size_val),
        's_transition_obs': (size_train, size_train+size_val),
        'r_transition_obs': (size_train, size_train+size_val),
        'reward_sim': (size_train, size_train+size_val),
        'reward_real': (size_train, size_train+size_val),
    }
}
f.attrs['split'] = H5PYDataset.create_split_array(split_dict)
def match_env(ev1, ev2):
    """Copy ev2's (real robot) mujoco state into ev1 (simulator).

    After this call both environments continue from exactly the same
    joint positions and velocities.
    """
    source = ev2.env.env.model.data
    ev1.env.env.set_state(source.qpos.ravel(), source.qvel.ravel())
# Main collection loop: for every episode, reset both envs, force them into
# the same state, then step them with identical actions and record both
# resulting transitions.
i = 0
exp = Experiment("dataset cheetah")
for i in tqdm(range(max_steps)):
    exp.metric("episode", i)
    obs = env.reset()
    obs2 = env2.reset()
    # Start the simulator from the real robot's exact state.
    match_env(env, env2)
    for j in range(episode_length):
        # env.render()
        # env2.render()
        if j % action_steps == 0:
            # Resample the shared action every `action_steps` steps
            # (action repeat).
            action = env.action_space.sample()
        new_obs, reward, done, info = env.step(action)
        new_obs2, reward2, done2, info2 = env2.step(action)
        # print (j, done, new_obs[0][0])
        # obs is a (state_vector, rgb_image) pair; images are downscaled
        # to the 128x128 dataset resolution.
        images[i, j, :, :, :] = imresize(obs[1], [128, 128, 3])
        observations[i, j, :] = obs[0]
        actions[i, j, :] = action
        s_transition_img[i, j, :, :, :] = imresize(new_obs[1], [128, 128, 3])
        r_transition_img[i, j, :, :, :] = imresize(new_obs2[1], [128, 128, 3])
        s_transition_obs[i, j, :] = new_obs[0]
        r_transition_obs[i, j, :] = new_obs2[0]
        # NOTE(review): these datasets are (episode, step)-shaped, but only
        # [i] is indexed, so each step overwrites the whole episode row with
        # a scalar -- probably meant reward_sim[i, j] / reward_real[i, j].
        reward_sim[i] = reward
        reward_real[i] = reward2
        # we have to set the state to be the old state in the next timestep.
        # Otherwise the old state is constant
        obs = new_obs
        match_env(env, env2)
        if done2:
            # print("Episode finished after {} timesteps".format(t+1))
            break
    if i % 200 == 0:
        print("Buffer currently filled at: {}%".format(int(i*100./max_steps)))
    if i % 100 == 0:
        print ("{} done".format(i))
        # Flush periodically so a crash loses at most ~100 episodes.
        f.flush()

f.flush()
f.close()
print('Created h5 dataset with {} elements'.format(max_steps))
| [
"fgolemo@gmail.com"
] | fgolemo@gmail.com |
1f076e31be7002240dc6cc7edc06f527821ec557 | de4d88db6ea32d20020c169f734edd4b95c3092d | /aiotdlib/api/functions/set_custom_language_pack.py | dd86e04e684d5f47e8ed95dcffb27b3d5a653339 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | thiagosm/aiotdlib | 5cc790a5645f7e4cc61bbd0791433ed182d69062 | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | refs/heads/main | 2023-08-15T05:16:28.436803 | 2021-10-18T20:41:27 | 2021-10-18T20:41:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | # =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import LanguagePackInfo
from ..types import LanguagePackString
class SetCustomLanguagePack(BaseObject):
    """
    Adds or changes a custom local language pack to the current localization target

    :param info: Information about the language pack. Language pack ID must start with 'X', consist only of English letters, digits and hyphens, and must not exceed 64 characters. Can be called before authorization
    :type info: :class:`LanguagePackInfo`

    :param strings: Strings of the new language pack
    :type strings: :class:`list[LanguagePackString]`
    """

    # TDLib method name; serialized as the "@type" field of the request.
    ID: str = Field("setCustomLanguagePack", alias="@type")
    info: LanguagePackInfo
    strings: list[LanguagePackString]

    @staticmethod
    def read(q: dict) -> SetCustomLanguagePack:
        # construct() bypasses pydantic validation: `q` comes straight from
        # TDLib and is trusted to already match the schema.
        return SetCustomLanguagePack.construct(**q)
| [
"pylakey@protonmail.com"
] | pylakey@protonmail.com |
2be4b98c022d59fb74c3e543370f9f90e68cf785 | d28f0c984bbd4f80b770259ceb884f38e1cc0db8 | /bin/find-image-dirs.py | 54be7c595d65e0f9836b50b9be5cb1327028f05c | [] | no_license | cloudmesh-community/book | 435a96e2a8d2b8438428880b5f39a56d2c1c63b7 | f971bc1d15c82375d4c765b33c6fbe383b5819de | refs/heads/main | 2022-11-09T16:57:26.495231 | 2022-10-27T21:27:37 | 2022-10-27T21:27:37 | 135,431,595 | 29 | 134 | null | 2022-02-01T02:04:03 | 2018-05-30T11:14:59 | Jupyter Notebook | UTF-8 | Python | false | false | 494 | py | #!/usr/bin/env python
import sys
import glob
import os
# Build a colon-separated search path of every chapter directory that
# contains an images/ subdirectory (printed for consumption by the build
# scripts, PATH-style).
directories = [".", "chapters"]
for root, dirs, files in os.walk("../chapters", topdown=False):
    for name in dirs:
        path = os.path.join(root, name)
        # NOTE(review): substring match also catches names merely
        # containing "images" -- presumably only exact 'images' dirs are
        # intended; kept as-is to preserve existing behavior.
        if 'images' in path:
            parent = os.path.dirname(path)
            # Express the path relative to the repo root instead of the
            # former fragile str.replace("../", "") hack.
            parent = os.path.relpath(parent, "..")
            # Avoid duplicate entries in the search path.
            if parent not in directories:
                directories.append(parent)
print(":".join(directories))
"laszewski@gmail.com"
] | laszewski@gmail.com |
c6cf2696635c77f6477ce3791478441990c3096c | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/offazure/v20200707/private_endpoint_connection.py | 2740aefae3714c0b2677e411cc0f31aa9e37e0e9 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,553 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = ['PrivateEndpointConnectionArgs', 'PrivateEndpointConnection']
# NOTE: generated pulumi input type (see the file header); keep edits to
# documentation only so a regeneration produces no behavioral diff.
@pulumi.input_type
class PrivateEndpointConnectionArgs:
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 site_name: pulumi.Input[str],
                 pe_connection_name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a PrivateEndpointConnection resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[str] site_name: Site name.
        :param pulumi.Input[str] pe_connection_name: Private link resource name.
        """
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "site_name", site_name)
        # Optional arg is only recorded when supplied, so the provider can
        # distinguish "unset" from an explicit value.
        if pe_connection_name is not None:
            pulumi.set(__self__, "pe_connection_name", pe_connection_name)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="siteName")
    def site_name(self) -> pulumi.Input[str]:
        """
        Site name.
        """
        return pulumi.get(self, "site_name")

    @site_name.setter
    def site_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "site_name", value)

    @property
    @pulumi.getter(name="peConnectionName")
    def pe_connection_name(self) -> Optional[pulumi.Input[str]]:
        """
        Private link resource name.
        """
        return pulumi.get(self, "pe_connection_name")

    @pe_connection_name.setter
    def pe_connection_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pe_connection_name", value)
# NOTE: generated pulumi resource (see the file header); documentation-only
# edits so a regeneration produces no behavioral diff.
class PrivateEndpointConnection(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 pe_connection_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 site_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        REST model used to encapsulate the user visible state of a PrivateEndpoint.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] pe_connection_name: Private link resource name.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[str] site_name: Site name.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PrivateEndpointConnectionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        REST model used to encapsulate the user visible state of a PrivateEndpoint.

        :param str resource_name: The name of the resource.
        :param PrivateEndpointConnectionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the (args-object) and (keyword-arguments)
        # overloads declared above.
        resource_args, opts = _utilities.get_resource_args_opts(PrivateEndpointConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 pe_connection_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 site_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs and seed the
            # output-only properties with None placeholders.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PrivateEndpointConnectionArgs.__new__(PrivateEndpointConnectionArgs)

            __props__.__dict__["pe_connection_name"] = pe_connection_name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if site_name is None and not opts.urn:
                raise TypeError("Missing required property 'site_name'")
            __props__.__dict__["site_name"] = site_name
            __props__.__dict__["e_tag"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["properties"] = None
            __props__.__dict__["system_data"] = None
            __props__.__dict__["type"] = None
        # Alias so state created under the version-less type token still maps
        # to this versioned resource type.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:offazure:PrivateEndpointConnection")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(PrivateEndpointConnection, __self__).__init__(
            'azure-native:offazure/v20200707:PrivateEndpointConnection',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'PrivateEndpointConnection':
        """
        Get an existing PrivateEndpointConnection resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Lookups resolve every property from provider state, so only None
        # placeholders are set here.
        __props__ = PrivateEndpointConnectionArgs.__new__(PrivateEndpointConnectionArgs)

        __props__.__dict__["e_tag"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["system_data"] = None
        __props__.__dict__["type"] = None
        return PrivateEndpointConnection(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="eTag")
    def e_tag(self) -> pulumi.Output[str]:
        """
        Gets the tag for optimistic concurrency control.
        """
        return pulumi.get(self, "e_tag")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Gets the name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output['outputs.PrivateEndpointConnectionPropertiesResponse']:
        """
        Gets the properties of the object.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
        """
        Metadata pertaining to creation and last modification of the resource.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Gets the resource type.
        """
        return pulumi.get(self, "type")
| [
"noreply@github.com"
] | bpkgoud.noreply@github.com |
a7ddf9fb5c1a07cb43829f558c8bec12271f05fa | 2391ff81d33e05efb1f38f7a67eff13dbb6a7b23 | /open_seq2seq/model/slstm.py | a93beee9507c8799bf6946c38afd75c4d0d30998 | [
"MIT"
] | permissive | matanhs/OpenSeq2Seq | 3b79222e114db75c1bf0f6b4b1eb0d231b8f0d0b | 540a1a230eff7c4cefcbb094ddc65aa11f64c9b3 | refs/heads/master | 2021-08-31T01:49:01.519514 | 2017-12-20T02:43:24 | 2017-12-20T04:47:16 | 113,621,881 | 1 | 0 | null | 2017-12-08T22:10:20 | 2017-12-08T22:10:20 | null | UTF-8 | Python | false | false | 6,106 | py | """Implement https://arxiv.org/abs/1709.02755
Copy from LSTM, and make it functionally correct with minimum code change
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import rnn_cell
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
# TF renamed the canonical RNN-cell variable names in 1.2.0; pick the names
# matching the installed version, presumably so checkpoints stay loadable.
# NOTE(review): lexicographic version comparison is fragile
# ("1.10.0" < "1.2.0" is True) -- confirm against the supported TF range.
_BIAS_VARIABLE_NAME = "biases" if tf.__version__ < "1.2.0" else "bias"
_WEIGHTS_VARIABLE_NAME = "weights" if tf.__version__ < "1.2.0" else "kernel"
class BasicSLSTMCell(rnn_cell.RNNCell):
    """Basic SLSTM recurrent network cell.

    The implementation is based on: https://arxiv.org/abs/1709.02755.
    """

    def __init__(self, num_units, forget_bias=1.0,
                 state_is_tuple=True, activation=None, reuse=None):
        """Initialize the basic SLSTM cell.

        Args:
          num_units: int, The number of units in the SLSTM cell.
          forget_bias: float, The bias added to forget gates (see above).
            Must set to `0.0` manually when restoring from CudnnLSTM-trained
            checkpoints.
          state_is_tuple: If True, accepted and returned states are 2-tuples of
            the `c_state` and `m_state`.  If False, they are concatenated
            along the column axis.  The latter behavior will soon be deprecated.
          activation: Activation function of the inner states.  Default: `tanh`.
          reuse: (optional) Python boolean describing whether to reuse variables
            in an existing scope.  If not `True`, and the existing scope already has
            the given variables, an error is raised.
        """
        super(BasicSLSTMCell, self).__init__(_reuse=reuse)
        if not state_is_tuple:
            logging.warn("%s: Using a concatenated state is slower and will soon be "
                         "deprecated.  Use state_is_tuple=True.", self)
        self._num_units = num_units
        self._forget_bias = forget_bias
        self._state_is_tuple = state_is_tuple
        self._activation = activation or math_ops.tanh

    @property
    def state_size(self):
        # (c, h) pair when tupled, otherwise both halves concatenated.
        return (rnn_cell.LSTMStateTuple(self._num_units, self._num_units)
                if self._state_is_tuple else 2 * self._num_units)

    @property
    def output_size(self):
        return self._num_units

    def call(self, inputs, state):
        """Long short-term memory cell (LSTM).

        Args:
          inputs: `2-D` tensor with shape `[batch_size x input_size]`.
          state: An `LSTMStateTuple` of state tensors, each shaped
            `[batch_size x self.state_size]`, if `state_is_tuple` has been set to
            `True`.  Otherwise, a `Tensor` shaped
            `[batch_size x 2 * self.state_size]`.

        Returns:
          A pair containing the new hidden state, and the new state (either a
            `LSTMStateTuple` or a concatenated state, depending on
            `state_is_tuple`).
        """
        sigmoid = math_ops.sigmoid
        # Parameters of gates are concatenated into one multiply for efficiency.
        if self._state_is_tuple:
            c, h = state
        else:
            c, h = array_ops.split(value=state, num_or_size_splits=2, axis=1)

        # Unlike a standard LSTM, the gate pre-activations are computed from
        # the current input alone (no recurrent matmul on `h`) -- this is the
        # SLSTM simplification from the paper above; the standard LSTM form
        # is kept commented out below for reference.
        #concat = _linear([inputs, h], 4 * self._num_units, True)
        concat = _linear(inputs, 4 * self._num_units, True)

        # i = input_gate, j = new_input, f = forget_gate, o = output_gate
        i, j, f, o = array_ops.split(value=concat, num_or_size_splits=4, axis=1)

        new_c = (
            c * sigmoid(f + self._forget_bias) + sigmoid(i) * self._activation(j))
        new_h = self._activation(new_c) * sigmoid(o)

        if self._state_is_tuple:
            new_state = rnn_cell.LSTMStateTuple(new_c, new_h)
        else:
            new_state = array_ops.concat([new_c, new_h], 1)
        return new_h, new_state
def _linear(args,
            output_size,
            bias,
            bias_initializer=None,
            kernel_initializer=None):
    """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.

    Args:
      args: a 2D Tensor or a list of 2D, batch x n, Tensors.
      output_size: int, second dimension of W[i].
      bias: boolean, whether to add a bias term or not.
      bias_initializer: starting value to initialize the bias
        (default is all zeros).
      kernel_initializer: starting value to initialize the weight.

    Returns:
      A 2D Tensor with shape [batch x output_size] equal to
      sum_i(args[i] * W[i]), where W[i]s are newly created matrices.

    Raises:
      ValueError: if some of the arguments has unspecified or wrong shape.
    """
    if args is None or (nest.is_sequence(args) and not args):
        raise ValueError("`args` must be specified")
    if not nest.is_sequence(args):
        args = [args]

    # Calculate the total size of arguments on dimension 1.
    total_arg_size = 0
    shapes = [a.get_shape() for a in args]
    for shape in shapes:
        if shape.ndims != 2:
            raise ValueError("linear is expecting 2D arguments: %s" % shapes)
        if shape[1].value is None:
            raise ValueError("linear expects shape[1] to be provided for shape %s, "
                             "but saw %s" % (shape, shape[1]))
        else:
            total_arg_size += shape[1].value

    dtype = [a.dtype for a in args][0]

    # Now the computation.
    # A single weight matrix over the concatenated inputs is equivalent to
    # per-input matrices summed, but needs only one matmul.
    scope = vs.get_variable_scope()
    with vs.variable_scope(scope) as outer_scope:
        weights = vs.get_variable(
            _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size],
            dtype=dtype,
            initializer=kernel_initializer)
        if len(args) == 1:
            res = math_ops.matmul(args[0], weights)
        else:
            res = math_ops.matmul(array_ops.concat(args, 1), weights)
        if not bias:
            return res
        with vs.variable_scope(outer_scope) as inner_scope:
            # Clear any inherited partitioner for the bias -- presumably
            # because a 1-D bias must not be sharded (TODO confirm).
            inner_scope.set_partitioner(None)
            if bias_initializer is None:
                bias_initializer = init_ops.constant_initializer(0.0, dtype=dtype)
            biases = vs.get_variable(
                _BIAS_VARIABLE_NAME, [output_size],
                dtype=dtype,
                initializer=bias_initializer)
        return nn_ops.bias_add(res, biases)
| [
"okuchaiev@nvidia.com"
] | okuchaiev@nvidia.com |
bdba48f7601d0424df259c6617119b28ae6ad279 | e5202e0f36c15b8898920a461a866168fa059947 | /clirad/co2_0.0008/band_7/atmpro_trp/cliradlw_523cbb7/param.py | a8f164cd3c18dc90ee4a60ce1b07bfc7921db990 | [] | no_license | qAp/analysis_-_new_kdist_param | 653c9873751646f6fa9481544e98ed6065a16155 | 272dc3667030cdb18664108d0bd78fee03736144 | refs/heads/master | 2021-06-11T04:21:35.105924 | 2019-08-04T13:13:07 | 2019-08-04T13:13:07 | 136,108,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | PARAM = {'commitnumber': '523cbb7', 'molecule': {'co2': 0.0008}, 'band': [7], 'atmpro': 'trp', 'tsfc': 300}
# Parameter set of the lblnew run whose k-distribution tables feed this
# clirad case (spectral band 5, CO2 at 0.0008 vmr, tropical profile).
PARAM_LBLNEW = {'atmpro': 'trp', 'band': '5', 'commitnumber': 'a22ab94', 'conc': 0.0008, 'dv': 0.001, 'klin': 6.5e-24, 'molecule': 'co2', 'ng_adju': [0, 0], 'ng_refs': [1, 2], 'nv': 1000, 'option_compute_btable': 0, 'option_compute_ktable': 0, 'option_wgt_flux': 1, 'option_wgt_k': 1, 'ref_pts': [(1, 250), (500, 250)], 'tsfc': 300, 'vmax': 1100, 'vmin': 980, 'w_diffuse': [(1.75,), (1.66, 1.9)], 'wgt': [(0.75,), (0.75, 0.95)]}
"llacque@gmail.com"
] | llacque@gmail.com |
1e067717037ea679c74fa4d55b9080f7baba735a | bade79e88dd32b42b4841a1e1a94eddfc86652bf | /communication/communicationWindow.py | b1145de35a61d32076eb03a2f4d120f36de0285e | [] | no_license | a452669850/DCSNEW | 927171b10f455b396b50e8400001efcdb5fd1217 | 7578b6b18e021108c3e8b31c5dab2a17ac16a79d | refs/heads/master | 2023-01-04T11:25:09.787625 | 2020-10-20T13:16:54 | 2020-10-20T13:16:54 | 305,712,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,057 | py | import sys
from pathlib import Path
# HACK: hard-coded absolute project root so sibling packages resolve when this
# module is run directly -- only works on this one machine; consider deriving
# the root from __file__ (as done for `path` below) instead.
sys.path.append('D:\\dcstms')
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication, QMainWindow, QMdiArea, QHBoxLayout
from communication import skio, iomapping
from communication.view.IntermediateVariable import intermediateVarWindow
from communication.view.databaseManagement import databaseManageWindow
from communication.view.deviceVariables import deviceVarWindow
from communication.view.myTree import TreeDockWidget
from communication.view.systemParameter import sysParameterWindow
# Directory of static assets, resolved relative to this file:
# <project root>/static (two levels up from communication/communicationWindow.py).
path = Path(__file__).absolute().parent.parent.joinpath('static')
class comWindow(QMainWindow):
    """Main window of the DCS automated-test tool: a docked navigation tree on
    the left and an MDI area that shows one management sub-window at a time.
    """

    # Maps a tree-item key (emitted by TreeDockWidget.tree_Signal) to the
    # sub-window class to open for it.
    # NOTE(review): in the original code both 'deviceVar' and 'intermediateVar'
    # opened intermediateVarWindow -- preserved here, but confirm it is intended.
    _SUBWINDOW_CLASSES = {
        'sysParameter': sysParameterWindow,
        'comEqu': deviceVarWindow,
        'deviceVar': intermediateVarWindow,
        'intermediateVar': intermediateVarWindow,
        'databaseManage': databaseManageWindow,
    }
    # Keys whose sub-window owns a background thread that must be started
    # after construction (matches the original per-branch behaviour).
    _THREADED_KEYS = frozenset({'deviceVar', 'intermediateVar'})

    def __init__(self):
        super().__init__()
        self.setWindowTitle('Dcs自动化测试软件-[工程管理器]')
        # Initialise the I/O layer before any view is built.
        skio.setup(path.joinpath('demo'))
        iomapping.setup_Current()
        self.createMenue()
        self.initUI()

    def initUI(self):
        """Build the central MDI area and dock the navigation tree on the left."""
        layout = QHBoxLayout()
        self.items = TreeDockWidget(self)
        # Clicking a tree item emits its key, which selects the sub-window.
        self.items.tree_Signal.connect(self.windowAction)
        self.items.setFloating(False)
        self.mdi = QMdiArea()
        self.setCentralWidget(self.mdi)
        self.addDockWidget(Qt.LeftDockWidgetArea, self.items)
        # NOTE(review): QMainWindow manages its own layout, so this call has no
        # visible effect -- kept from the original for behavioural parity.
        self.setLayout(layout)

    def createMenue(self):
        """Create the menu bar; every menu forwards to a placeholder handler."""
        self.menubar = self.menuBar()
        self.viewMenu1 = self.menubar.addMenu('&工程')
        self.viewMenu2 = self.menubar.addMenu('&查看')
        self.viewMenu3 = self.menubar.addMenu('&工具')
        self.viewMenu4 = self.menubar.addMenu('&操作')
        self.viewMenu5 = self.menubar.addMenu('&帮助')
        # Action titles grouped per menu (same order as before).
        menu_actions = (
            (self.viewMenu1, ('新建工程', '打开', '保存', '退出')),
            (self.viewMenu2, ('工具栏', '状态栏', '工作区', '显示区', '编辑')),
            (self.viewMenu3, ('模拟', '运行', '下载工程', '上传工程',
                              '标准modbus点表', '模板导入')),
            (self.viewMenu4, ('增加', '追加', '行拷', '列拷', '修改', '删除',
                              '导出', '导入')),
            (self.viewMenu5, ('帮助', '关于')),
        )
        for menu, titles in menu_actions:
            for title in titles:
                menu.addAction(title)
        self.viewMenu1.triggered.connect(self.menueAction1)
        self.viewMenu2.triggered.connect(self.menueAction2)
        self.viewMenu3.triggered.connect(self.menueAction3)
        self.viewMenu4.triggered.connect(self.menueAction4)
        self.viewMenu5.triggered.connect(self.menueAction5)

    def windowAction(self, text):
        """Show the sub-window selected in the tree, replacing any open one.

        Deduplicated from two near-identical branches in the original: any
        currently open sub-window is closed first (even for an unknown key,
        matching the original's behaviour), then the requested window is
        created, its worker thread started where applicable, and maximised.
        """
        for open_sub in self.mdi.subWindowList():
            open_sub.close()
        window_cls = self._SUBWINDOW_CLASSES.get(text)
        if window_cls is None:
            return  # unknown key: nothing to open
        sub = window_cls()
        if text in self._THREADED_KEYS:
            sub.threadings.start()
        self.mdi.addSubWindow(sub)
        sub.showMaximized()

    # --- Placeholder menu handlers: real behaviour not implemented yet. ---

    def menueAction1(self):
        print(1)

    def menueAction2(self):
        print(2)

    def menueAction3(self):
        print(3)

    def menueAction4(self):
        print(4)

    def menueAction5(self):
        print(5)
if __name__ == '__main__':
    # Launch the communication manager window as a standalone application.
    app = QApplication(sys.argv)
    main_window = comWindow()
    main_window.show()
    sys.exit(app.exec_())
| [
"a452669850@outlook.com"
] | a452669850@outlook.com |
0d6be0c4188a1d84cc9f5b5b16671e49c1e36a28 | eb66f122da246af7e5f342c5a3be3a02d0957d04 | /oldBoy/upload_client.py | b80d3719e98c46debe6404bff1dbc93271e267d8 | [] | no_license | zhchwolf/pylearn | 00838be6f5f68176ba3d0131f223839469a16a55 | 5dc2d39a90dd064ac88669d4535ad08b082bf04f | refs/heads/master | 2022-08-27T03:25:10.854780 | 2019-09-27T09:38:47 | 2019-09-27T09:38:47 | 157,136,924 | 0 | 1 | null | 2022-08-06T05:22:44 | 2018-11-12T00:56:12 | Python | UTF-8 | Python | false | false | 690 | py | import socket
import os
# Simple upload client: reads commands of the form "post|<filename>" from
# stdin and streams the named file (resolved relative to this script's
# directory) to the server at address, preceded by a
# "post|<name>|<size>" header line.
sk = socket.socket()
address = ('127.0.0.1', 8000)
sk.connect(address)

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

try:
    while True:
        inp = input('>>>')  # post|123.jpg
        # cmd is currently always 'post'; kept for protocol symmetry.
        cmd, path = inp.split('|')
        path = os.path.join(BASE_DIR, path)
        filename = os.path.basename(path)
        file_size = os.stat(path).st_size
        file_info = 'post|%s|%s' % (filename, file_size)
        sk.sendall(bytes(file_info, 'utf8'))
        # Stream the file in 1 KiB chunks; has_send tracks bytes actually
        # read, so short reads near EOF are accounted for correctly.
        with open(path, 'rb') as f:
            has_send = 0
            while has_send != file_size:
                data = f.read(1024)
                sk.sendall(data)
                has_send += len(data)
        # f.close() removed: the with-statement already closes the file.
        print('upload finished.')
finally:
    # Always release the socket, even if input()/file access raises.
    # (The original closed it in a position made ambiguous by lost
    # indentation; closing it per-iteration would break later uploads.)
    sk.close()
"zhchwolf@sina.com"
] | zhchwolf@sina.com |
47afd7e643d3c1fb17daceaa3e205522b12cb9b4 | 13ce655f82b93fb4089b29e62a8e33dd7ff05493 | /src/wai/json/error/_RequiredDisallowed.py | 5305a58f8080b5fe1c0cc7bf695504fde72de1f4 | [
"MIT"
] | permissive | waikato-datamining/wai-json | 603b90b13155114bbfb60b40f45100248c03d710 | cb013fb16e7c1b8d91e040a387a143d29d4ced96 | refs/heads/master | 2021-01-07T15:06:22.957223 | 2020-03-17T23:59:14 | 2020-03-17T23:59:14 | 241,736,670 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | from ._JSONPropertyError import JSONPropertyError
class RequiredDisallowed(JSONPropertyError):
    """
    Error raised when attempting to mark a JSON property as required
    in a context where required properties are not allowed.
    """
    pass
| [
"coreytsterling@gmail.com"
] | coreytsterling@gmail.com |
1c028682fb08f01c09007ae94fdf3bbee4bb88aa | 6612fcbf2d336ac98c03e1902496b8728b0d2cda | /kitsune/users/migrations/0010_auto_20151110_1307.py | 0e414f0bd7be9ca8ca10c9222a77ec3bd56c0545 | [] | permissive | ron813c/kitsune | 00af75780aa5976e0122b84c9018754f123ed0bb | 7ff783784084bddcb75fe38602f3a17d8f8c399c | refs/heads/master | 2022-11-05T12:58:49.097404 | 2020-06-13T17:54:16 | 2020-06-13T17:54:16 | 140,368,582 | 0 | 1 | BSD-3-Clause | 2022-10-18T20:43:15 | 2018-07-10T02:52:04 | Python | UTF-8 | Python | false | false | 26,827 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import timezones.fields
import kitsune.sumo.models
class Migration(migrations.Migration):
dependencies = [
('users', '0009_change_locale_sr_Cyrl_to_sr'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='locale',
field=kitsune.sumo.models.LocaleField(default=b'en-US', max_length=7, verbose_name='Preferred language', choices=[(b'af', 'Afrikaans'), (b'ar', '\u0639\u0631\u0628\u064a'), (b'az', 'Az\u0259rbaycanca'), (b'bg', '\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438'), (b'bm', 'Bamanankan'), (b'bn-BD', '\u09ac\u09be\u0982\u09b2\u09be (\u09ac\u09be\u0982\u09b2\u09be\u09a6\u09c7\u09b6)'), (b'bn-IN', '\u09ac\u09be\u0982\u09b2\u09be (\u09ad\u09be\u09b0\u09a4)'), (b'bs', 'Bosanski'), (b'ca', 'catal\xe0'), (b'cs', '\u010ce\u0161tina'), (b'da', 'Dansk'), (b'de', 'Deutsch'), (b'ee', '\xc8\u028begbe'), (b'el', '\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac'), (b'en-US', 'English'), (b'es', 'Espa\xf1ol'), (b'et', 'eesti keel'), (b'eu', 'Euskara'), (b'fa', '\u0641\u0627\u0631\u0633\u06cc'), (b'fi', 'suomi'), (b'fr', 'Fran\xe7ais'), (b'fy-NL', 'Frysk'), (b'ga-IE', 'Gaeilge (\xc9ire)'), (b'gl', 'Galego'), (b'gu-IN', '\u0a97\u0ac1\u0a9c\u0ab0\u0abe\u0aa4\u0ac0'), (b'ha', '\u0647\u064e\u0631\u0652\u0634\u064e\u0646 \u0647\u064e\u0648\u0652\u0633\u064e'), (b'he', '\u05e2\u05d1\u05e8\u05d9\u05ea'), (b'hi-IN', '\u0939\u093f\u0928\u094d\u0926\u0940 (\u092d\u093e\u0930\u0924)'), (b'hr', 'Hrvatski'), (b'hu', 'Magyar'), (b'dsb', 'Dolnoserb\u0161\u0107ina'), (b'hsb', 'Hornjoserbsce'), (b'id', 'Bahasa Indonesia'), (b'ig', 'As\u1ee5s\u1ee5 Igbo'), (b'it', 'Italiano'), (b'ja', '\u65e5\u672c\u8a9e'), (b'km', '\u1781\u17d2\u1798\u17c2\u179a'), (b'kn', '\u0c95\u0ca8\u0ccd\u0ca8\u0ca1'), (b'ko', '\ud55c\uad6d\uc5b4'), (b'ln', 'Ling\xe1la'), (b'lt', 'lietuvi\u0173 kalba'), (b'mg', 'Malagasy'), (b'mk', '\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u043a\u0438'), (b'ml', '\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02'), (b'ne-NP', '\u0928\u0947\u092a\u093e\u0932\u0940'), (b'nl', 'Nederlands'), (b'no', 'Norsk'), (b'pl', 'Polski'), (b'pt-BR', 'Portugu\xeas (do Brasil)'), (b'pt-PT', 'Portugu\xeas (Europeu)'), (b'ro', 'rom\xe2n\u0103'), (b'ru', '\u0420\u0443\u0441\u0441\u043a\u0438\u0439'), (b'si', 
'\u0dc3\u0dd2\u0d82\u0dc4\u0dbd'), (b'sk', 'sloven\u010dina'), (b'sl', 'sloven\u0161\u010dina'), (b'sq', 'Shqip'), (b'sr', '\u0421\u0440\u043f\u0441\u043a\u0438'), (b'sw', 'Kiswahili'), (b'sv', 'Svenska'), (b'ta', '\u0ba4\u0bae\u0bbf\u0bb4\u0bcd'), (b'ta-LK', '\u0ba4\u0bae\u0bbf\u0bb4\u0bcd (\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8)'), (b'te', '\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41'), (b'th', '\u0e44\u0e17\u0e22'), (b'tn', 'Setswana'), (b'tr', 'T\xfcrk\xe7e'), (b'uk', '\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430'), (b'ur', '\u0627\u064f\u0631\u062f\u0648'), (b'vi', 'Ti\u1ebfng Vi\u1ec7t'), (b'wo', 'Wolof'), (b'xh', 'isiXhosa'), (b'yo', '\xe8d\xe8 Yor\xf9b\xe1'), (b'zh-CN', '\u4e2d\u6587 (\u7b80\u4f53)'), (b'zh-TW', '\u6b63\u9ad4\u4e2d\u6587 (\u7e41\u9ad4)'), (b'zu', 'isiZulu')]),
preserve_default=True,
),
migrations.AlterField(
model_name='profile',
name='timezone',
field=timezones.fields.TimeZoneField(default=b'US/Pacific', choices=[(b'Africa/Abidjan', b'(GMT+0000) Africa/Abidjan'), (b'Africa/Accra', b'(GMT+0000) Africa/Accra'), (b'Africa/Addis_Ababa', b'(GMT+0300) Africa/Addis_Ababa'), (b'Africa/Algiers', b'(GMT+0100) Africa/Algiers'), (b'Africa/Asmara', b'(GMT+0300) Africa/Asmara'), (b'Africa/Bamako', b'(GMT+0000) Africa/Bamako'), (b'Africa/Bangui', b'(GMT+0100) Africa/Bangui'), (b'Africa/Banjul', b'(GMT+0000) Africa/Banjul'), (b'Africa/Bissau', b'(GMT+0000) Africa/Bissau'), (b'Africa/Blantyre', b'(GMT+0200) Africa/Blantyre'), (b'Africa/Brazzaville', b'(GMT+0100) Africa/Brazzaville'), (b'Africa/Bujumbura', b'(GMT+0200) Africa/Bujumbura'), (b'Africa/Cairo', b'(GMT+0200) Africa/Cairo'), (b'Africa/Casablanca', b'(GMT+0000) Africa/Casablanca'), (b'Africa/Ceuta', b'(GMT+0100) Africa/Ceuta'), (b'Africa/Conakry', b'(GMT+0000) Africa/Conakry'), (b'Africa/Dakar', b'(GMT+0000) Africa/Dakar'), (b'Africa/Dar_es_Salaam', b'(GMT+0300) Africa/Dar_es_Salaam'), (b'Africa/Djibouti', b'(GMT+0300) Africa/Djibouti'), (b'Africa/Douala', b'(GMT+0100) Africa/Douala'), (b'Africa/El_Aaiun', b'(GMT+0000) Africa/El_Aaiun'), (b'Africa/Freetown', b'(GMT+0000) Africa/Freetown'), (b'Africa/Gaborone', b'(GMT+0200) Africa/Gaborone'), (b'Africa/Harare', b'(GMT+0200) Africa/Harare'), (b'Africa/Johannesburg', b'(GMT+0200) Africa/Johannesburg'), (b'Africa/Juba', b'(GMT+0300) Africa/Juba'), (b'Africa/Kampala', b'(GMT+0300) Africa/Kampala'), (b'Africa/Khartoum', b'(GMT+0300) Africa/Khartoum'), (b'Africa/Kigali', b'(GMT+0200) Africa/Kigali'), (b'Africa/Kinshasa', b'(GMT+0100) Africa/Kinshasa'), (b'Africa/Lagos', b'(GMT+0100) Africa/Lagos'), (b'Africa/Libreville', b'(GMT+0100) Africa/Libreville'), (b'Africa/Lome', b'(GMT+0000) Africa/Lome'), (b'Africa/Luanda', b'(GMT+0100) Africa/Luanda'), (b'Africa/Lubumbashi', b'(GMT+0200) Africa/Lubumbashi'), (b'Africa/Lusaka', b'(GMT+0200) Africa/Lusaka'), (b'Africa/Malabo', b'(GMT+0100) Africa/Malabo'), (b'Africa/Maputo', 
b'(GMT+0200) Africa/Maputo'), (b'Africa/Maseru', b'(GMT+0200) Africa/Maseru'), (b'Africa/Mbabane', b'(GMT+0200) Africa/Mbabane'), (b'Africa/Mogadishu', b'(GMT+0300) Africa/Mogadishu'), (b'Africa/Monrovia', b'(GMT+0000) Africa/Monrovia'), (b'Africa/Nairobi', b'(GMT+0300) Africa/Nairobi'), (b'Africa/Ndjamena', b'(GMT+0100) Africa/Ndjamena'), (b'Africa/Niamey', b'(GMT+0100) Africa/Niamey'), (b'Africa/Nouakchott', b'(GMT+0000) Africa/Nouakchott'), (b'Africa/Ouagadougou', b'(GMT+0000) Africa/Ouagadougou'), (b'Africa/Porto-Novo', b'(GMT+0100) Africa/Porto-Novo'), (b'Africa/Sao_Tome', b'(GMT+0000) Africa/Sao_Tome'), (b'Africa/Tripoli', b'(GMT+0100) Africa/Tripoli'), (b'Africa/Tunis', b'(GMT+0100) Africa/Tunis'), (b'Africa/Windhoek', b'(GMT+0200) Africa/Windhoek'), (b'America/Adak', b'(GMT-1000) America/Adak'), (b'America/Anchorage', b'(GMT-0900) America/Anchorage'), (b'America/Anguilla', b'(GMT-0400) America/Anguilla'), (b'America/Antigua', b'(GMT-0400) America/Antigua'), (b'America/Araguaina', b'(GMT-0200) America/Araguaina'), (b'America/Argentina/Buenos_Aires', b'(GMT-0300) America/Argentina/Buenos_Aires'), (b'America/Argentina/Catamarca', b'(GMT-0300) America/Argentina/Catamarca'), (b'America/Argentina/Cordoba', b'(GMT-0300) America/Argentina/Cordoba'), (b'America/Argentina/Jujuy', b'(GMT-0300) America/Argentina/Jujuy'), (b'America/Argentina/La_Rioja', b'(GMT-0300) America/Argentina/La_Rioja'), (b'America/Argentina/Mendoza', b'(GMT-0300) America/Argentina/Mendoza'), (b'America/Argentina/Rio_Gallegos', b'(GMT-0300) America/Argentina/Rio_Gallegos'), (b'America/Argentina/Salta', b'(GMT-0300) America/Argentina/Salta'), (b'America/Argentina/San_Juan', b'(GMT-0300) America/Argentina/San_Juan'), (b'America/Argentina/San_Luis', b'(GMT-0300) America/Argentina/San_Luis'), (b'America/Argentina/Tucuman', b'(GMT-0300) America/Argentina/Tucuman'), (b'America/Argentina/Ushuaia', b'(GMT-0300) America/Argentina/Ushuaia'), (b'America/Aruba', b'(GMT-0400) America/Aruba'), 
(b'America/Asuncion', b'(GMT-0300) America/Asuncion'), (b'America/Atikokan', b'(GMT-0500) America/Atikokan'), (b'America/Bahia', b'(GMT-0300) America/Bahia'), (b'America/Bahia_Banderas', b'(GMT-0600) America/Bahia_Banderas'), (b'America/Barbados', b'(GMT-0400) America/Barbados'), (b'America/Belem', b'(GMT-0300) America/Belem'), (b'America/Belize', b'(GMT-0600) America/Belize'), (b'America/Blanc-Sablon', b'(GMT-0400) America/Blanc-Sablon'), (b'America/Boa_Vista', b'(GMT-0400) America/Boa_Vista'), (b'America/Bogota', b'(GMT-0500) America/Bogota'), (b'America/Boise', b'(GMT-0700) America/Boise'), (b'America/Cambridge_Bay', b'(GMT-0700) America/Cambridge_Bay'), (b'America/Campo_Grande', b'(GMT-0300) America/Campo_Grande'), (b'America/Cancun', b'(GMT-0600) America/Cancun'), (b'America/Caracas', b'(GMT-0430) America/Caracas'), (b'America/Cayenne', b'(GMT-0300) America/Cayenne'), (b'America/Cayman', b'(GMT-0500) America/Cayman'), (b'America/Chicago', b'(GMT-0600) America/Chicago'), (b'America/Chihuahua', b'(GMT-0700) America/Chihuahua'), (b'America/Costa_Rica', b'(GMT-0600) America/Costa_Rica'), (b'America/Creston', b'(GMT-0700) America/Creston'), (b'America/Cuiaba', b'(GMT-0300) America/Cuiaba'), (b'America/Curacao', b'(GMT-0400) America/Curacao'), (b'America/Danmarkshavn', b'(GMT+0000) America/Danmarkshavn'), (b'America/Dawson', b'(GMT-0800) America/Dawson'), (b'America/Dawson_Creek', b'(GMT-0700) America/Dawson_Creek'), (b'America/Denver', b'(GMT-0700) America/Denver'), (b'America/Detroit', b'(GMT-0500) America/Detroit'), (b'America/Dominica', b'(GMT-0400) America/Dominica'), (b'America/Edmonton', b'(GMT-0700) America/Edmonton'), (b'America/Eirunepe', b'(GMT-0400) America/Eirunepe'), (b'America/El_Salvador', b'(GMT-0600) America/El_Salvador'), (b'America/Fortaleza', b'(GMT-0300) America/Fortaleza'), (b'America/Glace_Bay', b'(GMT-0400) America/Glace_Bay'), (b'America/Godthab', b'(GMT-0300) America/Godthab'), (b'America/Goose_Bay', b'(GMT-0400) America/Goose_Bay'), 
(b'America/Grand_Turk', b'(GMT-0500) America/Grand_Turk'), (b'America/Grenada', b'(GMT-0400) America/Grenada'), (b'America/Guadeloupe', b'(GMT-0400) America/Guadeloupe'), (b'America/Guatemala', b'(GMT-0600) America/Guatemala'), (b'America/Guayaquil', b'(GMT-0500) America/Guayaquil'), (b'America/Guyana', b'(GMT-0400) America/Guyana'), (b'America/Halifax', b'(GMT-0400) America/Halifax'), (b'America/Havana', b'(GMT-0500) America/Havana'), (b'America/Hermosillo', b'(GMT-0700) America/Hermosillo'), (b'America/Indiana/Indianapolis', b'(GMT-0500) America/Indiana/Indianapolis'), (b'America/Indiana/Knox', b'(GMT-0600) America/Indiana/Knox'), (b'America/Indiana/Marengo', b'(GMT-0500) America/Indiana/Marengo'), (b'America/Indiana/Petersburg', b'(GMT-0500) America/Indiana/Petersburg'), (b'America/Indiana/Tell_City', b'(GMT-0600) America/Indiana/Tell_City'), (b'America/Indiana/Vevay', b'(GMT-0500) America/Indiana/Vevay'), (b'America/Indiana/Vincennes', b'(GMT-0500) America/Indiana/Vincennes'), (b'America/Indiana/Winamac', b'(GMT-0500) America/Indiana/Winamac'), (b'America/Inuvik', b'(GMT-0700) America/Inuvik'), (b'America/Iqaluit', b'(GMT-0500) America/Iqaluit'), (b'America/Jamaica', b'(GMT-0500) America/Jamaica'), (b'America/Juneau', b'(GMT-0900) America/Juneau'), (b'America/Kentucky/Louisville', b'(GMT-0500) America/Kentucky/Louisville'), (b'America/Kentucky/Monticello', b'(GMT-0500) America/Kentucky/Monticello'), (b'America/Kralendijk', b'(GMT-0400) America/Kralendijk'), (b'America/La_Paz', b'(GMT-0400) America/La_Paz'), (b'America/Lima', b'(GMT-0500) America/Lima'), (b'America/Los_Angeles', b'(GMT-0800) America/Los_Angeles'), (b'America/Lower_Princes', b'(GMT-0400) America/Lower_Princes'), (b'America/Maceio', b'(GMT-0300) America/Maceio'), (b'America/Managua', b'(GMT-0600) America/Managua'), (b'America/Manaus', b'(GMT-0400) America/Manaus'), (b'America/Marigot', b'(GMT-0400) America/Marigot'), (b'America/Martinique', b'(GMT-0400) America/Martinique'), (b'America/Matamoros', 
b'(GMT-0600) America/Matamoros'), (b'America/Mazatlan', b'(GMT-0700) America/Mazatlan'), (b'America/Menominee', b'(GMT-0600) America/Menominee'), (b'America/Merida', b'(GMT-0600) America/Merida'), (b'America/Metlakatla', b'(GMT-0800) America/Metlakatla'), (b'America/Mexico_City', b'(GMT-0600) America/Mexico_City'), (b'America/Miquelon', b'(GMT-0300) America/Miquelon'), (b'America/Moncton', b'(GMT-0400) America/Moncton'), (b'America/Monterrey', b'(GMT-0600) America/Monterrey'), (b'America/Montevideo', b'(GMT-0200) America/Montevideo'), (b'America/Montreal', b'(GMT-0500) America/Montreal'), (b'America/Montserrat', b'(GMT-0400) America/Montserrat'), (b'America/Nassau', b'(GMT-0500) America/Nassau'), (b'America/New_York', b'(GMT-0500) America/New_York'), (b'America/Nipigon', b'(GMT-0500) America/Nipigon'), (b'America/Nome', b'(GMT-0900) America/Nome'), (b'America/Noronha', b'(GMT-0200) America/Noronha'), (b'America/North_Dakota/Beulah', b'(GMT-0600) America/North_Dakota/Beulah'), (b'America/North_Dakota/Center', b'(GMT-0600) America/North_Dakota/Center'), (b'America/North_Dakota/New_Salem', b'(GMT-0600) America/North_Dakota/New_Salem'), (b'America/Ojinaga', b'(GMT-0700) America/Ojinaga'), (b'America/Panama', b'(GMT-0500) America/Panama'), (b'America/Pangnirtung', b'(GMT-0500) America/Pangnirtung'), (b'America/Paramaribo', b'(GMT-0300) America/Paramaribo'), (b'America/Phoenix', b'(GMT-0700) America/Phoenix'), (b'America/Port-au-Prince', b'(GMT-0500) America/Port-au-Prince'), (b'America/Port_of_Spain', b'(GMT-0400) America/Port_of_Spain'), (b'America/Porto_Velho', b'(GMT-0400) America/Porto_Velho'), (b'America/Puerto_Rico', b'(GMT-0400) America/Puerto_Rico'), (b'America/Rainy_River', b'(GMT-0600) America/Rainy_River'), (b'America/Rankin_Inlet', b'(GMT-0600) America/Rankin_Inlet'), (b'America/Recife', b'(GMT-0300) America/Recife'), (b'America/Regina', b'(GMT-0600) America/Regina'), (b'America/Resolute', b'(GMT-0600) America/Resolute'), (b'America/Rio_Branco', b'(GMT-0400) 
America/Rio_Branco'), (b'America/Santa_Isabel', b'(GMT-0800) America/Santa_Isabel'), (b'America/Santarem', b'(GMT-0300) America/Santarem'), (b'America/Santiago', b'(GMT-0300) America/Santiago'), (b'America/Santo_Domingo', b'(GMT-0400) America/Santo_Domingo'), (b'America/Sao_Paulo', b'(GMT-0200) America/Sao_Paulo'), (b'America/Scoresbysund', b'(GMT-0100) America/Scoresbysund'), (b'America/Shiprock', b'(GMT-0700) America/Shiprock'), (b'America/Sitka', b'(GMT-0900) America/Sitka'), (b'America/St_Barthelemy', b'(GMT-0400) America/St_Barthelemy'), (b'America/St_Johns', b'(GMT-0330) America/St_Johns'), (b'America/St_Kitts', b'(GMT-0400) America/St_Kitts'), (b'America/St_Lucia', b'(GMT-0400) America/St_Lucia'), (b'America/St_Thomas', b'(GMT-0400) America/St_Thomas'), (b'America/St_Vincent', b'(GMT-0400) America/St_Vincent'), (b'America/Swift_Current', b'(GMT-0600) America/Swift_Current'), (b'America/Tegucigalpa', b'(GMT-0600) America/Tegucigalpa'), (b'America/Thule', b'(GMT-0400) America/Thule'), (b'America/Thunder_Bay', b'(GMT-0500) America/Thunder_Bay'), (b'America/Tijuana', b'(GMT-0800) America/Tijuana'), (b'America/Toronto', b'(GMT-0500) America/Toronto'), (b'America/Tortola', b'(GMT-0400) America/Tortola'), (b'America/Vancouver', b'(GMT-0800) America/Vancouver'), (b'America/Whitehorse', b'(GMT-0800) America/Whitehorse'), (b'America/Winnipeg', b'(GMT-0600) America/Winnipeg'), (b'America/Yakutat', b'(GMT-0900) America/Yakutat'), (b'America/Yellowknife', b'(GMT-0700) America/Yellowknife'), (b'Antarctica/Casey', b'(GMT+0800) Antarctica/Casey'), (b'Antarctica/Davis', b'(GMT+0700) Antarctica/Davis'), (b'Antarctica/DumontDUrville', b'(GMT+1000) Antarctica/DumontDUrville'), (b'Antarctica/Macquarie', b'(GMT+1100) Antarctica/Macquarie'), (b'Antarctica/Mawson', b'(GMT+0500) Antarctica/Mawson'), (b'Antarctica/McMurdo', b'(GMT+1300) Antarctica/McMurdo'), (b'Antarctica/Palmer', b'(GMT-0300) Antarctica/Palmer'), (b'Antarctica/Rothera', b'(GMT-0300) Antarctica/Rothera'), 
(b'Antarctica/South_Pole', b'(GMT+1300) Antarctica/South_Pole'), (b'Antarctica/Syowa', b'(GMT+0300) Antarctica/Syowa'), (b'Antarctica/Vostok', b'(GMT+0600) Antarctica/Vostok'), (b'Arctic/Longyearbyen', b'(GMT+0100) Arctic/Longyearbyen'), (b'Asia/Aden', b'(GMT+0300) Asia/Aden'), (b'Asia/Almaty', b'(GMT+0600) Asia/Almaty'), (b'Asia/Amman', b'(GMT+0200) Asia/Amman'), (b'Asia/Anadyr', b'(GMT+1200) Asia/Anadyr'), (b'Asia/Aqtau', b'(GMT+0500) Asia/Aqtau'), (b'Asia/Aqtobe', b'(GMT+0500) Asia/Aqtobe'), (b'Asia/Ashgabat', b'(GMT+0500) Asia/Ashgabat'), (b'Asia/Baghdad', b'(GMT+0300) Asia/Baghdad'), (b'Asia/Bahrain', b'(GMT+0300) Asia/Bahrain'), (b'Asia/Baku', b'(GMT+0400) Asia/Baku'), (b'Asia/Bangkok', b'(GMT+0700) Asia/Bangkok'), (b'Asia/Beirut', b'(GMT+0200) Asia/Beirut'), (b'Asia/Bishkek', b'(GMT+0600) Asia/Bishkek'), (b'Asia/Brunei', b'(GMT+0800) Asia/Brunei'), (b'Asia/Choibalsan', b'(GMT+0800) Asia/Choibalsan'), (b'Asia/Chongqing', b'(GMT+0800) Asia/Chongqing'), (b'Asia/Colombo', b'(GMT+0530) Asia/Colombo'), (b'Asia/Damascus', b'(GMT+0200) Asia/Damascus'), (b'Asia/Dhaka', b'(GMT+0600) Asia/Dhaka'), (b'Asia/Dili', b'(GMT+0900) Asia/Dili'), (b'Asia/Dubai', b'(GMT+0400) Asia/Dubai'), (b'Asia/Dushanbe', b'(GMT+0500) Asia/Dushanbe'), (b'Asia/Gaza', b'(GMT+0200) Asia/Gaza'), (b'Asia/Harbin', b'(GMT+0800) Asia/Harbin'), (b'Asia/Hebron', b'(GMT+0200) Asia/Hebron'), (b'Asia/Ho_Chi_Minh', b'(GMT+0700) Asia/Ho_Chi_Minh'), (b'Asia/Hong_Kong', b'(GMT+0800) Asia/Hong_Kong'), (b'Asia/Hovd', b'(GMT+0700) Asia/Hovd'), (b'Asia/Irkutsk', b'(GMT+0900) Asia/Irkutsk'), (b'Asia/Jakarta', b'(GMT+0700) Asia/Jakarta'), (b'Asia/Jayapura', b'(GMT+0900) Asia/Jayapura'), (b'Asia/Jerusalem', b'(GMT+0200) Asia/Jerusalem'), (b'Asia/Kabul', b'(GMT+0430) Asia/Kabul'), (b'Asia/Kamchatka', b'(GMT+1200) Asia/Kamchatka'), (b'Asia/Karachi', b'(GMT+0500) Asia/Karachi'), (b'Asia/Kashgar', b'(GMT+0800) Asia/Kashgar'), (b'Asia/Kathmandu', b'(GMT+0545) Asia/Kathmandu'), (b'Asia/Khandyga', b'(GMT+1000) 
Asia/Khandyga'), (b'Asia/Kolkata', b'(GMT+0530) Asia/Kolkata'), (b'Asia/Krasnoyarsk', b'(GMT+0800) Asia/Krasnoyarsk'), (b'Asia/Kuala_Lumpur', b'(GMT+0800) Asia/Kuala_Lumpur'), (b'Asia/Kuching', b'(GMT+0800) Asia/Kuching'), (b'Asia/Kuwait', b'(GMT+0300) Asia/Kuwait'), (b'Asia/Macau', b'(GMT+0800) Asia/Macau'), (b'Asia/Magadan', b'(GMT+1200) Asia/Magadan'), (b'Asia/Makassar', b'(GMT+0800) Asia/Makassar'), (b'Asia/Manila', b'(GMT+0800) Asia/Manila'), (b'Asia/Muscat', b'(GMT+0400) Asia/Muscat'), (b'Asia/Nicosia', b'(GMT+0200) Asia/Nicosia'), (b'Asia/Novokuznetsk', b'(GMT+0700) Asia/Novokuznetsk'), (b'Asia/Novosibirsk', b'(GMT+0700) Asia/Novosibirsk'), (b'Asia/Omsk', b'(GMT+0700) Asia/Omsk'), (b'Asia/Oral', b'(GMT+0500) Asia/Oral'), (b'Asia/Phnom_Penh', b'(GMT+0700) Asia/Phnom_Penh'), (b'Asia/Pontianak', b'(GMT+0700) Asia/Pontianak'), (b'Asia/Pyongyang', b'(GMT+0900) Asia/Pyongyang'), (b'Asia/Qatar', b'(GMT+0300) Asia/Qatar'), (b'Asia/Qyzylorda', b'(GMT+0600) Asia/Qyzylorda'), (b'Asia/Rangoon', b'(GMT+0630) Asia/Rangoon'), (b'Asia/Riyadh', b'(GMT+0300) Asia/Riyadh'), (b'Asia/Sakhalin', b'(GMT+1100) Asia/Sakhalin'), (b'Asia/Samarkand', b'(GMT+0500) Asia/Samarkand'), (b'Asia/Seoul', b'(GMT+0900) Asia/Seoul'), (b'Asia/Shanghai', b'(GMT+0800) Asia/Shanghai'), (b'Asia/Singapore', b'(GMT+0800) Asia/Singapore'), (b'Asia/Taipei', b'(GMT+0800) Asia/Taipei'), (b'Asia/Tashkent', b'(GMT+0500) Asia/Tashkent'), (b'Asia/Tbilisi', b'(GMT+0400) Asia/Tbilisi'), (b'Asia/Tehran', b'(GMT+0330) Asia/Tehran'), (b'Asia/Thimphu', b'(GMT+0600) Asia/Thimphu'), (b'Asia/Tokyo', b'(GMT+0900) Asia/Tokyo'), (b'Asia/Ulaanbaatar', b'(GMT+0800) Asia/Ulaanbaatar'), (b'Asia/Urumqi', b'(GMT+0800) Asia/Urumqi'), (b'Asia/Ust-Nera', b'(GMT+1100) Asia/Ust-Nera'), (b'Asia/Vientiane', b'(GMT+0700) Asia/Vientiane'), (b'Asia/Vladivostok', b'(GMT+1100) Asia/Vladivostok'), (b'Asia/Yakutsk', b'(GMT+1000) Asia/Yakutsk'), (b'Asia/Yekaterinburg', b'(GMT+0600) Asia/Yekaterinburg'), (b'Asia/Yerevan', b'(GMT+0400) 
Asia/Yerevan'), (b'Atlantic/Azores', b'(GMT-0100) Atlantic/Azores'), (b'Atlantic/Bermuda', b'(GMT-0400) Atlantic/Bermuda'), (b'Atlantic/Canary', b'(GMT+0000) Atlantic/Canary'), (b'Atlantic/Cape_Verde', b'(GMT-0100) Atlantic/Cape_Verde'), (b'Atlantic/Faroe', b'(GMT+0000) Atlantic/Faroe'), (b'Atlantic/Madeira', b'(GMT+0000) Atlantic/Madeira'), (b'Atlantic/Reykjavik', b'(GMT+0000) Atlantic/Reykjavik'), (b'Atlantic/South_Georgia', b'(GMT-0200) Atlantic/South_Georgia'), (b'Atlantic/St_Helena', b'(GMT+0000) Atlantic/St_Helena'), (b'Atlantic/Stanley', b'(GMT-0300) Atlantic/Stanley'), (b'Australia/Adelaide', b'(GMT+1030) Australia/Adelaide'), (b'Australia/Brisbane', b'(GMT+1000) Australia/Brisbane'), (b'Australia/Broken_Hill', b'(GMT+1030) Australia/Broken_Hill'), (b'Australia/Currie', b'(GMT+1100) Australia/Currie'), (b'Australia/Darwin', b'(GMT+0930) Australia/Darwin'), (b'Australia/Eucla', b'(GMT+0845) Australia/Eucla'), (b'Australia/Hobart', b'(GMT+1100) Australia/Hobart'), (b'Australia/Lindeman', b'(GMT+1000) Australia/Lindeman'), (b'Australia/Lord_Howe', b'(GMT+1100) Australia/Lord_Howe'), (b'Australia/Melbourne', b'(GMT+1100) Australia/Melbourne'), (b'Australia/Perth', b'(GMT+0800) Australia/Perth'), (b'Australia/Sydney', b'(GMT+1100) Australia/Sydney'), (b'Canada/Atlantic', b'(GMT-0400) Canada/Atlantic'), (b'Canada/Central', b'(GMT-0600) Canada/Central'), (b'Canada/Eastern', b'(GMT-0500) Canada/Eastern'), (b'Canada/Mountain', b'(GMT-0700) Canada/Mountain'), (b'Canada/Newfoundland', b'(GMT-0330) Canada/Newfoundland'), (b'Canada/Pacific', b'(GMT-0800) Canada/Pacific'), (b'Europe/Amsterdam', b'(GMT+0100) Europe/Amsterdam'), (b'Europe/Andorra', b'(GMT+0100) Europe/Andorra'), (b'Europe/Athens', b'(GMT+0200) Europe/Athens'), (b'Europe/Belgrade', b'(GMT+0100) Europe/Belgrade'), (b'Europe/Berlin', b'(GMT+0100) Europe/Berlin'), (b'Europe/Bratislava', b'(GMT+0100) Europe/Bratislava'), (b'Europe/Brussels', b'(GMT+0100) Europe/Brussels'), (b'Europe/Bucharest', b'(GMT+0200) 
Europe/Bucharest'), (b'Europe/Budapest', b'(GMT+0100) Europe/Budapest'), (b'Europe/Busingen', b'(GMT+0100) Europe/Busingen'), (b'Europe/Chisinau', b'(GMT+0200) Europe/Chisinau'), (b'Europe/Copenhagen', b'(GMT+0100) Europe/Copenhagen'), (b'Europe/Dublin', b'(GMT+0000) Europe/Dublin'), (b'Europe/Gibraltar', b'(GMT+0100) Europe/Gibraltar'), (b'Europe/Guernsey', b'(GMT+0000) Europe/Guernsey'), (b'Europe/Helsinki', b'(GMT+0200) Europe/Helsinki'), (b'Europe/Isle_of_Man', b'(GMT+0000) Europe/Isle_of_Man'), (b'Europe/Istanbul', b'(GMT+0200) Europe/Istanbul'), (b'Europe/Jersey', b'(GMT+0000) Europe/Jersey'), (b'Europe/Kaliningrad', b'(GMT+0300) Europe/Kaliningrad'), (b'Europe/Kiev', b'(GMT+0200) Europe/Kiev'), (b'Europe/Lisbon', b'(GMT+0000) Europe/Lisbon'), (b'Europe/Ljubljana', b'(GMT+0100) Europe/Ljubljana'), (b'Europe/London', b'(GMT+0000) Europe/London'), (b'Europe/Luxembourg', b'(GMT+0100) Europe/Luxembourg'), (b'Europe/Madrid', b'(GMT+0100) Europe/Madrid'), (b'Europe/Malta', b'(GMT+0100) Europe/Malta'), (b'Europe/Mariehamn', b'(GMT+0200) Europe/Mariehamn'), (b'Europe/Minsk', b'(GMT+0300) Europe/Minsk'), (b'Europe/Monaco', b'(GMT+0100) Europe/Monaco'), (b'Europe/Moscow', b'(GMT+0400) Europe/Moscow'), (b'Europe/Oslo', b'(GMT+0100) Europe/Oslo'), (b'Europe/Paris', b'(GMT+0100) Europe/Paris'), (b'Europe/Podgorica', b'(GMT+0100) Europe/Podgorica'), (b'Europe/Prague', b'(GMT+0100) Europe/Prague'), (b'Europe/Riga', b'(GMT+0200) Europe/Riga'), (b'Europe/Rome', b'(GMT+0100) Europe/Rome'), (b'Europe/Samara', b'(GMT+0400) Europe/Samara'), (b'Europe/San_Marino', b'(GMT+0100) Europe/San_Marino'), (b'Europe/Sarajevo', b'(GMT+0100) Europe/Sarajevo'), (b'Europe/Simferopol', b'(GMT+0200) Europe/Simferopol'), (b'Europe/Skopje', b'(GMT+0100) Europe/Skopje'), (b'Europe/Sofia', b'(GMT+0200) Europe/Sofia'), (b'Europe/Stockholm', b'(GMT+0100) Europe/Stockholm'), (b'Europe/Tallinn', b'(GMT+0200) Europe/Tallinn'), (b'Europe/Tirane', b'(GMT+0100) Europe/Tirane'), (b'Europe/Uzhgorod', 
b'(GMT+0200) Europe/Uzhgorod'), (b'Europe/Vaduz', b'(GMT+0100) Europe/Vaduz'), (b'Europe/Vatican', b'(GMT+0100) Europe/Vatican'), (b'Europe/Vienna', b'(GMT+0100) Europe/Vienna'), (b'Europe/Vilnius', b'(GMT+0200) Europe/Vilnius'), (b'Europe/Volgograd', b'(GMT+0400) Europe/Volgograd'), (b'Europe/Warsaw', b'(GMT+0100) Europe/Warsaw'), (b'Europe/Zagreb', b'(GMT+0100) Europe/Zagreb'), (b'Europe/Zaporozhye', b'(GMT+0200) Europe/Zaporozhye'), (b'Europe/Zurich', b'(GMT+0100) Europe/Zurich'), (b'GMT', b'(GMT+0000) GMT'), (b'Indian/Antananarivo', b'(GMT+0300) Indian/Antananarivo'), (b'Indian/Chagos', b'(GMT+0600) Indian/Chagos'), (b'Indian/Christmas', b'(GMT+0700) Indian/Christmas'), (b'Indian/Cocos', b'(GMT+0630) Indian/Cocos'), (b'Indian/Comoro', b'(GMT+0300) Indian/Comoro'), (b'Indian/Kerguelen', b'(GMT+0500) Indian/Kerguelen'), (b'Indian/Mahe', b'(GMT+0400) Indian/Mahe'), (b'Indian/Maldives', b'(GMT+0500) Indian/Maldives'), (b'Indian/Mauritius', b'(GMT+0400) Indian/Mauritius'), (b'Indian/Mayotte', b'(GMT+0300) Indian/Mayotte'), (b'Indian/Reunion', b'(GMT+0400) Indian/Reunion'), (b'Pacific/Apia', b'(GMT+1400) Pacific/Apia'), (b'Pacific/Auckland', b'(GMT+1300) Pacific/Auckland'), (b'Pacific/Chatham', b'(GMT+1345) Pacific/Chatham'), (b'Pacific/Chuuk', b'(GMT+1000) Pacific/Chuuk'), (b'Pacific/Easter', b'(GMT-0500) Pacific/Easter'), (b'Pacific/Efate', b'(GMT+1100) Pacific/Efate'), (b'Pacific/Enderbury', b'(GMT+1300) Pacific/Enderbury'), (b'Pacific/Fakaofo', b'(GMT+1300) Pacific/Fakaofo'), (b'Pacific/Fiji', b'(GMT+1300) Pacific/Fiji'), (b'Pacific/Funafuti', b'(GMT+1200) Pacific/Funafuti'), (b'Pacific/Galapagos', b'(GMT-0600) Pacific/Galapagos'), (b'Pacific/Gambier', b'(GMT-0900) Pacific/Gambier'), (b'Pacific/Guadalcanal', b'(GMT+1100) Pacific/Guadalcanal'), (b'Pacific/Guam', b'(GMT+1000) Pacific/Guam'), (b'Pacific/Honolulu', b'(GMT-1000) Pacific/Honolulu'), (b'Pacific/Johnston', b'(GMT-1000) Pacific/Johnston'), (b'Pacific/Kiritimati', b'(GMT+1400) Pacific/Kiritimati'), 
(b'Pacific/Kosrae', b'(GMT+1100) Pacific/Kosrae'), (b'Pacific/Kwajalein', b'(GMT+1200) Pacific/Kwajalein'), (b'Pacific/Majuro', b'(GMT+1200) Pacific/Majuro'), (b'Pacific/Marquesas', b'(GMT-0930) Pacific/Marquesas'), (b'Pacific/Midway', b'(GMT-1100) Pacific/Midway'), (b'Pacific/Nauru', b'(GMT+1200) Pacific/Nauru'), (b'Pacific/Niue', b'(GMT-1100) Pacific/Niue'), (b'Pacific/Norfolk', b'(GMT+1130) Pacific/Norfolk'), (b'Pacific/Noumea', b'(GMT+1100) Pacific/Noumea'), (b'Pacific/Pago_Pago', b'(GMT-1100) Pacific/Pago_Pago'), (b'Pacific/Palau', b'(GMT+0900) Pacific/Palau'), (b'Pacific/Pitcairn', b'(GMT-0800) Pacific/Pitcairn'), (b'Pacific/Pohnpei', b'(GMT+1100) Pacific/Pohnpei'), (b'Pacific/Port_Moresby', b'(GMT+1000) Pacific/Port_Moresby'), (b'Pacific/Rarotonga', b'(GMT-1000) Pacific/Rarotonga'), (b'Pacific/Saipan', b'(GMT+1000) Pacific/Saipan'), (b'Pacific/Tahiti', b'(GMT-1000) Pacific/Tahiti'), (b'Pacific/Tarawa', b'(GMT+1200) Pacific/Tarawa'), (b'Pacific/Tongatapu', b'(GMT+1300) Pacific/Tongatapu'), (b'Pacific/Wake', b'(GMT+1200) Pacific/Wake'), (b'Pacific/Wallis', b'(GMT+1200) Pacific/Wallis'), (b'US/Alaska', b'(GMT-0900) US/Alaska'), (b'US/Arizona', b'(GMT-0700) US/Arizona'), (b'US/Central', b'(GMT-0600) US/Central'), (b'US/Eastern', b'(GMT-0500) US/Eastern'), (b'US/Hawaii', b'(GMT-1000) US/Hawaii'), (b'US/Mountain', b'(GMT-0700) US/Mountain'), (b'US/Pacific', b'(GMT-0800) US/Pacific'), (b'UTC', b'(GMT+0000) UTC')], max_length=100, blank=True, null=True, verbose_name='Timezone'),
preserve_default=True,
),
]
| [
"willkg@mozilla.com"
] | willkg@mozilla.com |
59789cb9c983d28dc952d267e6db4245284bedb2 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/132/usersdata/260/41472/submittedfiles/al14.py | c446200a7c5705038df4fc037e84245344b06221 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | # -*- coding: utf-8 -*-
n=int(input("digite o número de pessoas:"))
altura=0
for i in range (1,n+1,1):
alturas=int(input("digite a altura dessas pessoa:"+str(i)))
total=alturas+total
media=total//n
print(media)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
2ba8c74885ea992b4ecf86a44ac01f05dcfb83be | abad82a1f487c5ff2fb6a84059a665aa178275cb | /Codewars/8kyu/exclamation-marks-series-number-6-remove-n-exclamation-marks-in-the-sentence-from-left-to-right/Python/solution1.py | 2807034fb3df1b06926c996bb515a33858c445dd | [
"MIT"
] | permissive | RevansChen/online-judge | 8ae55f136739a54f9c9640a967ec931425379507 | ad1b07fee7bd3c49418becccda904e17505f3018 | refs/heads/master | 2021-01-19T23:02:58.273081 | 2019-07-05T09:42:40 | 2019-07-05T09:42:40 | 88,911,035 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | # Python - 3.6.0
remove = lambda s, n: ''.join(s.split('!', n))
| [
"d79523@hotmail.com"
] | d79523@hotmail.com |
2a0ec0f69f93cf994d8be8e8c04d26b4bdf2deb8 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/viorels-mtgox-trader/allPythonContent.py | d51e46a1c1721e51f34bbdefb9c57053b166785e | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,472 | py | __FILENAME__ = api
from httplib2 import Http
import simplejson as json
from urlparse import urlunparse
from urllib import urlencode
class ServerError(Exception):
    """Raised on transport-level failures: non-200 HTTP status,
    timeouts/refused connections, or undecodable JSON payloads.

    The raw server payload (or a short failure description) is kept
    in ``ret`` for callers that want to inspect it.
    """
    def __init__(self, ret):
        # Forward a formatted message to Exception so e.args is populated
        # (the original left args empty, breaking pickling and generic
        # "%s" formatting paths that bypass __str__).
        super(ServerError, self).__init__("Server error: %s" % ret)
        self.ret = ret  # raw server response / failure description

    def __str__(self):
        return "Server error: %s" % self.ret
class UserError(Exception):
    """Raised when the exchange answers 200 but reports an
    application-level error (an "error" key in the JSON body).
    """
    def __init__(self, errmsg):
        # Forward the message to Exception so e.args is populated
        # (the original left args empty, breaking pickling and generic
        # "%s" formatting paths that bypass __str__).
        super(UserError, self).__init__(errmsg)
        self.errmsg = errmsg  # error text as reported by the server

    def __str__(self):
        return self.errmsg
class MTGox:
    """Minimal MTGox HTTP API client.

    Public market-data calls (ticker, depth, trades) go over plain HTTP;
    authenticated account calls are POSTed over HTTPS with the credentials
    sent as form fields.  One handler method per entry in ``actions`` is
    generated at construction time and attached to the instance.
    """
    def __init__(self, user, password):
        self.user = user
        self.password = password
        self.server = "mtgox.com"
        self.timeout = 10  # seconds, passed to httplib2.Http
        # action name -> (HTTP method, URL path)
        self.actions = {"_get_ticker": ("GET", "/code/data/ticker.php"),
                        "get_depth": ("GET", "/code/data/getDepth.php"),
                        "get_trades": ("GET", "/code/data/getTrades.php"),
                        "get_balance": ("POST", "/code/getFunds.php"),
                        "buy_btc": ("POST", "/code/buyBTC.php"),
                        "sell_btc": ("POST", "/code/sellBTC.php"),
                        "_get_orders": ("POST", "/code/getOrders.php"),
                        "_cancel_order": ("POST", "/code/cancelOrder.php"),
                        "_withdraw": ("POST", "/code/withdraw.php")}
        for action, (method, _) in self.actions.items():
            # BUG FIX: bind BOTH loop variables as defaults.  The original
            # only bound `action`; `method` was captured late (by reference),
            # so every generated handler used whichever HTTP method the loop
            # happened to see last -- e.g. GET endpoints could be POSTed.
            def _handler(action=action, method=method, **args):
                return self._request(action, method=method, args=args)
            setattr(self, action, _handler)

    def get_ticker(self):
        """Return the ticker dict (unwraps the top-level "ticker" key)."""
        return self._get_ticker()["ticker"]

    def get_orders(self):
        """Return the list of open orders (unwraps the "orders" key)."""
        return self._get_orders()["orders"]  # response can also carry balance

    def cancel_order(self, oid, typ=None):
        """Cancel order `oid`.

        The server needs the order's type (buy/sell) as well; if `typ` is
        not given, it is looked up from the current open orders.  Raises
        UserError when the order id cannot be found.
        """
        if typ is None:
            orders = self.get_orders()
            matching = [o for o in orders if o["oid"] == oid]
            if matching:
                typ = matching[0]["type"]
            else:
                raise UserError("unknown order/type")
        return self._cancel_order(oid=oid, type=typ)

    def withdraw(self, amount, btca, group1="BTC"):
        """Withdraw `amount` to bitcoin address `btca`; return the status text."""
        return self._withdraw(amount=amount, btca=btca,
                              group1=group1)["status"]  # can also return balance

    def _request(self, action, method="GET", args=None):
        """Perform one API call and return the decoded JSON payload.

        Raises UserError when the server answers 200 with an "error" key,
        ServerError on any transport/HTTP/decoding failure.
        """
        # Copy caller args; avoid the mutable-default-argument pitfall.
        query = dict(args) if args else {}
        data = None
        headers = {}
        if method == "GET":
            url = self._url(action)
        if method == "POST":
            # Authenticated call: credentials travel in the form body,
            # so force HTTPS.
            url = self._url(action, scheme="https")
            query["name"] = self.user
            query["pass"] = self.password
            data = urlencode(query)
            headers['Content-type'] = 'application/x-www-form-urlencoded'
        h = Http(cache=None, timeout=self.timeout)
        try:
            resp, content = h.request(url, method, headers=headers, body=data)
            if resp.status == 200:
                data = json.loads(content)
                if "error" in data:
                    raise UserError(data["error"])
                else:
                    return data
            else:
                raise ServerError(content)
        except AttributeError:
            # httplib2 raises AttributeError ("'NoneType' object has no
            # attribute 'makefile'") when the connection times out or is
            # refused; surface it as a ServerError instead.
            raise ServerError("timeout/refused")
        except ValueError as e:
            # json.loads failed: the body was not valid JSON.
            raise ServerError("%s: %s" % (e, content))

    def _url(self, action, scheme="http", args=None):
        """Build the full URL for `action`, optionally with a query string."""
        url = urlunparse((scheme,
                          self.server,
                          self.actions[action][1],  # path
                          '',
                          urlencode(args or {}),
                          ''))
        return url
class ExchB(MTGox):
    """www.exchangebitcoins.com client: same wire protocol as MTGox,
    different host and URL paths.  Note this exchange exposes no
    withdraw endpoint, so "_withdraw" is absent from the action table.
    """
    def __init__(self, user, password):
        MTGox.__init__(self, user, password)
        self.server = "www.exchangebitcoins.com"
        self.actions = {"_get_ticker": ("GET", "/data/ticker"),
                        "get_depth": ("GET", "/data/depth"),
                        "get_trades": ("GET", "/data/recent"),
                        "get_balance": ("POST", "/data/getFunds"),
                        "buy_btc": ("POST", "/data/buyBTC"),
                        "sell_btc": ("POST", "/data/sellBTC"),
                        "_get_orders": ("POST", "/data/getOrders"),
                        "_cancel_order": ("POST", "/data/cancelOrder")}
        # BUG FIX: MTGox.__init__ already generated the per-action handler
        # attributes, but from MTGox's action table and with the HTTP method
        # captured late (so every handler used the last method iterated).
        # Regenerate the handlers from THIS exchange's table, binding both
        # loop variables as defaults so each handler keeps its own method.
        for action, (method, _) in self.actions.items():
            def _handler(action=action, method=method, **args):
                return self._request(action, method=method, args=args)
            setattr(self, action, _handler)
########NEW FILE########
__FILENAME__ = balance
#!/usr/bin/env python
"""Fetch and print the account's current funds for the configured exchange."""
from settings import *

balance = exchange.get_balance()
print(balance)
########NEW FILE########
__FILENAME__ = buy
#!/usr/bin/env python
"""Place a buy order: buy.py <amount> [bid].

Without a bid price the exchange treats it as a market order.
"""
import sys
from settings import *

# Guard clause: bail out early on bad usage instead of nesting the
# happy path under an if/else.
if len(sys.argv) not in (2, 3):
    print("Usage: %s <amount> [bid]" % sys.argv[0])
    exit(1)

amount = sys.argv[1]
bid = sys.argv[2] if len(sys.argv) == 3 else None

status = exchange.buy_btc(amount=amount, price=bid)
print(status)
########NEW FILE########
__FILENAME__ = cancel
#!/usr/bin/env python
# Cancel an open order by its id: cancel.py <order id>
import sys
from settings import *
if len(sys.argv) == 2:
    oid = sys.argv[1]
else:
    print "Usage: %s <order id>" % sys.argv[0]
    exit(1)
status = exchange.cancel_order(oid=oid)
print status
########NEW FILE########
__FILENAME__ = defaultsettings
#!/usr/bin/env python
#
# Copy to settings.py, enter credentials for the
# exchange(s) you would like to connect to and uncomment
# the corresponding exchange line.
#
# WARNING: settings.py will contain real credentials -- keep it out of
# version control.
from api import ExchB, MTGox
EXCHB_USER = 'your_username'
EXCHB_PASSWORD = 'your_password'
MTGOX_USER = 'your_username'
MTGOX_PASSWORD = 'your_password'
# uncomment the exchange you want to use
#exchange = ExchB(user=EXCHB_USER, password=EXCHB_PASSWORD)
exchange = MTGox(user=MTGOX_USER, password=MTGOX_PASSWORD)
########NEW FILE########
__FILENAME__ = depth
#!/usr/bin/env python
# Print the order book (bids and asks) sorted by price ascending.
# Each depth entry is assumed to be a (price, amount) pair.
from settings import *
depth = exchange.get_depth()
bids = sorted(depth['bids'], key=lambda bid: bid[0])
asks = sorted(depth['asks'], key=lambda bid: bid[0])
print "*** Bids"
for price, amount in bids:
    print "%s\t%s" % (price, amount)
print "\n*** Asks"
for price, amount in asks:
    print "%s\t%s" % (price, amount)
########NEW FILE########
__FILENAME__ = orders
#!/usr/bin/env python
# List open orders with a human readable type, status and age in minutes.
import time
from settings import *
orders = exchange.get_orders()
now = time.time()
for order in orders:
    # Order "type" can be numeric (MtGox) or a string (ExchB); map both.
    order["type_text"] = {1: "sell", 2: "buy", "Sell": "sell", "Buy": "buy"}[order["type"]]
    if "status" in order:
        order["status_text"] = {1: "active",
                                2: "not enough funds"}[int(order["status"])]
    else:
        order["status_text"] = "active"
    # Age in whole minutes since the order was placed.
    order["ago"] = int((now - int(order["date"]))/60)
    print ("%(oid)s %(type_text)s %(amount)s at %(price)s %(ago)s minutes ago, "
           "%(status_text)s" % order)
########NEW FILE########
__FILENAME__ = sell
#!/usr/bin/env python
# Place a sell order: sell.py <amount> [ask]
# When no ask price is given, None is passed to the exchange wrapper.
import sys
from settings import *
if len(sys.argv) in (2, 3):
    amount = sys.argv[1]
    ask = sys.argv[2] if len(sys.argv) == 3 else None
else:
    print "Usage: %s <amount> [ask]" % sys.argv[0]
    exit(1)
status = exchange.sell_btc(amount=amount, price=ask)
print status
########NEW FILE########
__FILENAME__ = ticker
#!/usr/bin/env python
# Print the current ticker values; a falsy ticker means the request failed.
from settings import *
ticker = exchange.get_ticker()
if ticker:
    for key in ("last", "buy", "sell", "low", "high", "vol"):
        print "%s\t: %s" % (key, ticker[key])
else:
    print "failed, see logs"
########NEW FILE########
__FILENAME__ = trades
#!/usr/bin/env python
# Print recent trades as "amount @ price (age in minutes)".
import time
from settings import *
trades = exchange.get_trades()
now = time.time()
for tr in trades:
    # also print tid
    print "%s \t@ %s (%s minutes ago)" % (tr["amount"],
                                          tr["price"],
                                          int((now - tr["date"])/60))
########NEW FILE########
__FILENAME__ = watch
#!/usr/bin/env python
import time
from settings import *
wait = 60
last_trades = {}
last_bids = []
last_asks = []
while True:
trades = exchange.get_trades()
now = time.time()
for tr in trades:
if not last_trades.has_key(tr["tid"]):
last_trades[tr["tid"]] = tr
# also print tid
print "%s: %s \t@ %s (%s minutes ago)" % (tr["tid"],
tr["amount"],
tr["price"],
int((now - tr["date"])/60))
time.sleep(wait)
continue
depth = exchange.get_depth()
bids = sorted(depth['bids'], key=lambda bid: bid[0])
asks = sorted(depth['asks'], key=lambda bid: bid[0])
print "*** Bids"
for price, amount in bids:
print "%s\t%s" % (price, amount)
print "\n*** Asks"
for price, amount in asks:
print "%s\t%s" % (price, amount)
time.sleep(wait)
########NEW FILE########
__FILENAME__ = withdraw
#!/usr/bin/env python
# Withdraw funds: withdraw.py <amount> <BTC address>
import sys
from settings import *
if len(sys.argv) == 3:
    _, amount, address = sys.argv
else:
    print "Usage: %s <amount> <BTC address>" % sys.argv[0]
    exit(1)
status = exchange.withdraw(group1="BTC", btca=address, amount=amount)
print status
########NEW FILE########
| [
"dyangUCI@github.com"
] | dyangUCI@github.com |
017d4a1b0484ade3ebe995b017041b678fbe1c5d | 5c00df958c8010c3bea2e5d5ef63d63642526495 | /setup.py | 278b6215ba952ddf33240cd2b2bf597c3adf6e25 | [
"MIT"
] | permissive | ppdebreuck/matbench | 32e9be021349c8bcb04efd992205f21e9850912f | 4bc373d86671e17aeba3ecdd8a49c3ad555d4513 | refs/heads/main | 2023-08-25T19:05:55.508133 | 2021-10-11T15:39:47 | 2021-10-11T15:39:47 | 393,661,109 | 0 | 0 | MIT | 2021-08-07T11:11:38 | 2021-08-07T11:11:38 | null | UTF-8 | Python | false | false | 1,468 | py | import os
from setuptools import setup, find_packages
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))

# Read install requirements. Spaces are stripped (so "pkg >= 1" becomes
# "pkg>=1") and empty entries from blank lines are dropped -- previously
# blank lines produced "" requirements that were passed to setuptools.
with open(os.path.join(MODULE_DIR, "requirements.txt"), "r") as f:
    requirements = [r for r in f.read().replace(" ", "").split("\n") if r]

# source of version is in the constants file
VERSION_FILE = os.path.join(MODULE_DIR, "matbench/constants.py")
token = "VERSION = "
with open(VERSION_FILE, "r") as f:
    version = None
    for line in f.readlines():
        if token in line:
            version = line.replace(token, "").strip()
            # Double quotes are contained in the read line, remove them
            version = version.replace("\"", "")

# Fail fast instead of silently packaging with version=None.
if version is None:
    raise RuntimeError("could not parse VERSION from %s" % VERSION_FILE)

if __name__ == "__main__":
    setup(
        name='matbench',
        version=version,
        description='a machine learning benchmark for materials science',
        long_description="A machine learning benchmark for materials science. "
                         "https://github.com/hackingmaterials/matbench",
        url='https://github.com/hackingmaterials/matbench',
        author=['Alex Dunn', 'Anubhav Jain'],
        author_email='ardunn@lbl.gov',
        license='modified BSD',
        packages=find_packages(where="."),
        package_data={
            "matbench": ["*.json"],
            "matbench.tests": ["*.json"]
        },
        zip_safe=False,
        install_requires=requirements,
        extras_require={},
        test_suite='matbench',
        tests_require='tests',
        include_package_data=True
    )
| [
"ardunn@lbl.gov"
] | ardunn@lbl.gov |
62f74a9cfefeb751139c8a1fb60850c830101bfb | 287792543e5f15cd912661ffe0575d4fc3d03a49 | /backend/src/baserow/core/registries.py | c5f640f0e9fe3b433d04f42104a84869f0fef686 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | jacklicn/baserow | 27f7566a164127d8b4571be4493447347d8aa3ed | 978d9462ededbaa96674a6653028ba19876ea273 | refs/heads/master | 2023-04-02T19:06:20.961729 | 2021-04-08T16:00:37 | 2021-04-08T16:00:37 | 357,790,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,506 | py | from .registry import (
Instance, Registry, ModelInstanceMixin, ModelRegistryMixin, APIUrlsRegistryMixin,
APIUrlsInstanceMixin, ImportExportMixin
)
from .exceptions import ApplicationTypeAlreadyRegistered, ApplicationTypeDoesNotExist
class Plugin(APIUrlsInstanceMixin, Instance):
    """
    Base class for a custom Baserow plugin.

    A plugin is registered with the `plugin_registry` and can contribute
    extra url patterns, both at the root level (via `get_urls`) and under
    the API (via `get_api_urls`, provided by APIUrlsInstanceMixin). The
    API urls live in the 'api' namespace, so a url named 'example'
    resolves with reverse('api:example').

    Example:

        from django.http import HttpResponse
        from baserow.core.registries import Plugin, plugin_registry

        def page_1(request):
            return HttpResponse('Page 2')

        class ExamplePlugin(Plugin):
            type = 'a-unique-type-name'

            # Will be added to the root.
            def get_urls(self):
                return [
                    url(r'^page-1$', page_1, name='page_1')
                ]

            # Will be added to the API.
            def get_api_urls(self):
                return [
                    path('application-type/', include(api_urls, namespace=self.type)),
                ]

        plugin_registry.register(ExamplePlugin())
    """

    def get_urls(self):
        """
        Hook for exposing extra root url patterns; the base plugin exposes
        none. Subclasses typically import their url module locally and
        return its patterns, e.g.::

            def get_urls(self):
                from . import api_urls
                return [
                    path('some-url/', include(api_urls, namespace=self.type)),
                ]

        :return: A list containing the urls.
        :rtype: list
        """
        patterns = []
        return patterns

    def user_created(self, user, group, group_invitation):
        """
        Hook called right after a new user has been created, together with
        the group that was created for them. This is the place to seed any
        data the user should start with.

        :param user: The newly created user.
        :type user: User
        :param group: The newly created group for the user.
        :type group: Group
        :param group_invitation: Provided when the user signed up using a
            valid group invitation token.
        :type group_invitation: GroupInvitation
        """
class PluginRegistry(APIUrlsRegistryMixin, Registry):
    """
    Registry holding every installed Baserow plugin. Registering a plugin
    lets it contribute extra api and root urls.
    """

    name = 'plugin'

    @property
    def urls(self):
        """
        Collect the root url patterns contributed by every registered
        plugin; these are appended to the root url config.

        :return: The urls of the registered instances.
        :rtype: list
        """
        collected = []
        for plugin in self.registry.values():
            collected.extend(plugin.get_urls())
        return collected
class ApplicationType(APIUrlsInstanceMixin, ModelInstanceMixin, ImportExportMixin,
                      Instance):
    """
    Base class for a custom Baserow application type.

    Each application type ships its own model (extending the core
    Application model) so users can keep per-instance settings, and may
    register api urls via `get_api_urls`. Those urls end up in the 'api'
    namespace, so a url named 'example' resolves with
    reverse('api:example').

    Example:

        from baserow.core.models import Application
        from baserow.core.registries import ApplicationType, application_type_registry

        class ExampleApplicationModel(Application):
            pass

        class ExampleApplication(ApplicationType):
            type = 'a-unique-type-name'
            model_class = ExampleApplicationModel

            def get_api_urls(self):
                return [
                    path('application-type/', include(api_urls, namespace=self.type)),
                ]

        application_type_registry.register(ExampleApplication())
    """

    # Serializer class used to serialize the instance model.
    instance_serializer_class = None

    def pre_delete(self, application):
        """
        Hook invoked right before the application instance is deleted.

        :param application: The application model instance that needs to be
            deleted.
        :type application: Application
        """

    def export_serialized(self, application):
        """
        Export the application into a JSON-serializable dict that
        `import_serialized` can consume.

        :param application: The application that must be exported.
        :type application: Application
        :return: The exported and serialized application.
        :rtype: dict
        """
        return {
            'id': application.id,
            'name': application.name,
            'order': application.order,
            'type': self.type
        }

    def import_serialized(self, group, serialized_values, id_mapping):
        """
        Import a dict produced by `export_serialized` as a new application
        inside `group`, recording the old-id -> new-id translation in
        `id_mapping['applications']`.

        :param group: The group that the application must be added to.
        :type group: Group
        :param serialized_values: The exported serialized values by the
            `export_serialized` method.
        :type serialized_values: dict
        :param id_mapping: The map of exported ids to newly created ids that
            must be updated when a new instance has been created.
        :type id_mapping: dict
        :return: The newly created application.
        :rtype: Application
        """
        application_ids = id_mapping.setdefault('applications', {})
        values = dict(serialized_values)
        original_id = values.pop('id')
        values.pop('type')
        application = self.model_class.objects.create(group=group, **values)
        application_ids[original_id] = application.id
        return application
class ApplicationTypeRegistry(APIUrlsRegistryMixin, ModelRegistryMixin, Registry):
    """
    With the application registry it is possible to register new applications. An
    application is an abstraction made specifically for Baserow. If added to the
    registry a user can create new instances of that application via the app and
    register api related urls.
    """
    name = 'application'
    # Exception classes raised by the base Registry for lookups of unknown
    # types and for duplicate registrations, respectively.
    does_not_exist_exception_class = ApplicationTypeDoesNotExist
    already_registered_exception_class = ApplicationTypeAlreadyRegistered
# A default plugin and application registry is created here, this is the one that is
# used throughout the whole Baserow application. To add a new plugin or application use
# these registries.
plugin_registry = PluginRegistry()
application_type_registry = ApplicationTypeRegistry()
| [
"bramw@protonmail.com"
] | bramw@protonmail.com |
58b07d55f2442753f0175fb19c79c359c7b655d5 | 2f0d56cdcc4db54f9484b3942db88d79a4215408 | /.history/Python_Learning/Condiion_20200410231844.py | 807f9f0e50b806d27bf01dda56519bf8f565b699 | [] | no_license | xiangxing98/xiangxing98.github.io | 8571c8ee8509c0bccbb6c2f3740494eedc53e418 | 23618666363ecc6d4acd1a8662ea366ddf2e6155 | refs/heads/master | 2021-11-17T19:00:16.347567 | 2021-11-14T08:35:01 | 2021-11-14T08:35:01 | 33,877,060 | 7 | 1 | null | 2017-07-01T16:42:49 | 2015-04-13T15:35:01 | HTML | UTF-8 | Python | false | false | 3,178 | py | import time
print('如果你想拥有读心术,那选择X教授')
time.sleep(2)
print('如果你想干扰地球磁场,那选择万磁王')
time.sleep(2)
print('如果你想急速自愈能力,野兽般的感知能力,那选择金刚狼')
time.sleep(2)
print('如果你想拥有拥有念力移位和心电感应,那选择凤凰女')
time.sleep(2)
print('如果你想拥有拥有能随意控制气候的能力,那选择暴风女')
time.sleep(2)
print('那么,如果让你来选择的话,你想选择哪个人物?')
time.sleep(2)
print('请在以下六个选项【1 X教授 ;2 万磁王;3 金刚狼 ;4 凤凰女;5 暴风女 ;】中,选择你最想成为的人物吧!')
time.sleep(3)
answer = input('请将对应数字输入在冒号后: ')
if answer == '1':
print('我是教授,通过其能力剥夺并控制他人的思维同时操纵他人的行动。')
time.sleep(3)
elif answer == '2':
print('我X万磁王,通过干扰地球磁场达到飞行的能力。')
time.sleep(3)
elif answer == '3':
print('我是金刚狼,天生双臂长有可伸出体外的利爪')
time.sleep(3)
elif answer == '4':
print('我是凤凰女,预知未来,并能抗拒他人的精神攻击。')
time.sleep(3)
elif answer == '5':
print('我是暴风女,被称作天气女神。')
time.sleep(3)
# 为钱包赋值
money = 1000
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
# 结果:显示‘金刚狼带凤凰女去吃日料’的结果
print('金刚狼带凤凰女去吃日料')
# IndentationError: expected an indented block (缩进错误)
# if
# 为钱包赋值
money = 500
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
# 结果:显示‘你去吃日料’的结果
print('金刚狼带凤凰女去吃日料')
# if else
# 为钱包赋值
money = 1000
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
print('金刚狼带凤凰女去吃日料')
# 条件:当不满足if条件,执行else条件下语句
else:
print('金刚狼带凤凰女去吃KFC')
# 赋值语句:为体重赋值
weight = 98
# if else
# 条件:满足体重超过90(包含90)
if weight>=90:
# 结果:显示‘跑步’的结果
print('她就每天去跑步')
# 条件:当赋值不满足if条件时,执行else下的命令
else:
# 结果:显示‘散步’的结果
print('她就每天去散步')
# 为钱包赋值
money = 999
# if elseif else
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
print('金刚狼带凤凰女去吃日料')
# 条件:如果有800-1000块钱之间(包含800块)
elif money>=800:
print('金刚狼带凤凰女去吃披萨')
#不满足条件
else:
print('金刚狼带凤凰女去吃KFC')
# 今天晚上,小K要加班了。如果加班时间超过10点(包含10点),小K就打车回家;如果加班时间在8点-10点之间(包含8点),小K坐地铁回家;如果加班时间早于8点,小K骑单车回家。 | [
"xiangxing985529@163.com"
] | xiangxing985529@163.com |
3886165407d0ba5f66efa43613de88aafdc77bbe | 5e8e4ca2205ccecf3b5b1ce12ae202883d4a5237 | /anchore_engine/clients/policy_engine/__init__.py | 481856d7a178a192d14edd75658cd2fa2d624924 | [
"Apache-2.0"
] | permissive | jasonwilk/anchore-engine | e6fb35060942688164e93a099559aa9be8900502 | 3b587a597be985cf5895f4a249418855d4be3fae | refs/heads/master | 2020-03-28T04:02:00.007092 | 2018-09-06T14:55:43 | 2018-09-06T14:55:43 | 147,689,738 | 0 | 0 | Apache-2.0 | 2018-09-06T14:53:27 | 2018-09-06T14:52:01 | Python | UTF-8 | Python | false | false | 1,577 | py | import random
import anchore_engine.clients.common
from anchore_engine.subsys.discovery import get_endpoints
from .generated import DefaultApi, configuration, ApiClient
from anchore_engine.subsys import logger
import anchore_engine.configuration.localconfig
# Name under which the policy engine registers itself with service discovery.
SERVICE_NAME = 'policy_engine'

def get_client(host=None, user=None, password=None, verify_ssl=None):
    """
    Returns an initialized client with credentials and endpoint set properly.

    :param host: hostname including port for the destination, will be looked up if not provided
    :param user: username for the request auth
    :param password: password for the request auth
    :param verify_ssl: whether to verify TLS certificates; when None the
        value is read from the local config key 'internal_ssl_verify'
        (default True)
    :return: initialized client object
    """
    if not host:
        # Discover the policy engine endpoint from the service registry.
        # The previous `try/except Exception as err: raise err` wrapper was
        # a no-op re-raise and has been removed.
        endpoint = anchore_engine.clients.common.get_service_endpoint((user, password), SERVICE_NAME)
        if endpoint:
            host = endpoint
        else:
            raise Exception("cannot find endpoint for service: {}".format(SERVICE_NAME))
    config = configuration.Configuration()
    if host:
        config.host = host
    if user:
        config.username = user
    if password:
        config.password = password
    if verify_ssl is None:  # was `== None`; `is None` is the correct identity check
        localconfig = anchore_engine.configuration.localconfig.get_config()
        verify_ssl = localconfig.get('internal_ssl_verify', True)
    config.verify_ssl = verify_ssl
    api_client = ApiClient(configuration=config)
    return DefaultApi(api_client=api_client)
| [
"nurmi@anchore.com"
] | nurmi@anchore.com |
8153e6afaf7e26c90c5d6e9b2a1aa321af0382d5 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /grafeas/v1/grafeas-v1-py/grafeas/grafeas_v1/types/deployment.py | 69ac2dac990e1a4f42b3f2fae3580581b4827d37 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,995 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
# proto-plus module descriptor: declares the proto package this module
# belongs to and the message classes it exports.
__protobuf__ = proto.module(
    package='grafeas.v1',
    manifest={
        'DeploymentNote',
        'DeploymentOccurrence',
    },
)
class DeploymentNote(proto.Message):
    r"""An artifact that can be deployed in some runtime.
    Attributes:
        resource_uri (Sequence[str]):
            Required. Resource URI for the artifact being
            deployed.
    """
    # Field number 1 of the grafeas.v1 DeploymentNote proto message.
    resource_uri = proto.RepeatedField(
        proto.STRING,
        number=1,
    )
class DeploymentOccurrence(proto.Message):
    r"""The period during which some deployable was active in a
    runtime.
    Attributes:
        user_email (str):
            Identity of the user that triggered this
            deployment.
        deploy_time (google.protobuf.timestamp_pb2.Timestamp):
            Required. Beginning of the lifetime of this
            deployment.
        undeploy_time (google.protobuf.timestamp_pb2.Timestamp):
            End of the lifetime of this deployment.
        config (str):
            Configuration used to create this deployment.
        address (str):
            Address of the runtime element hosting this
            deployment.
        resource_uri (Sequence[str]):
            Output only. Resource URI for the artifact
            being deployed taken from the deployable field
            with the same name.
        platform (grafeas.grafeas_v1.types.DeploymentOccurrence.Platform):
            Platform hosting this deployment.
    """
    class Platform(proto.Enum):
        r"""Types of platforms."""
        PLATFORM_UNSPECIFIED = 0
        GKE = 1
        FLEX = 2
        CUSTOM = 3
    # Field numbers below mirror the grafeas.v1 proto definition.
    user_email = proto.Field(
        proto.STRING,
        number=1,
    )
    deploy_time = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )
    undeploy_time = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    config = proto.Field(
        proto.STRING,
        number=4,
    )
    address = proto.Field(
        proto.STRING,
        number=5,
    )
    resource_uri = proto.RepeatedField(
        proto.STRING,
        number=6,
    )
    platform = proto.Field(
        proto.ENUM,
        number=7,
        enum=Platform,
    )
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
0251b865943a31f95d2d8fcca8ff63221627c2f1 | 59e59cbc24650b557f41c041cbeb8dad10c4d2b1 | /01 Array/169. Majority Element.py | 718cf01d75ceb6a3273c129b36510ee9634562bf | [] | no_license | stungkit/Leetcode-Data-Structures-Algorithms | 5345211f4ceb7dc7651360f0ca0a7f48f2434556 | a3a341369a8acd86c29f8fba642f856d6ea2fd0a | refs/heads/master | 2023-07-26T18:14:17.993112 | 2021-09-13T15:40:47 | 2021-09-13T15:40:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,566 | py | # Given an array of size n, find the majority element. The majority element is the element that appears more than ⌊ n/2 ⌋ times.
# You may assume that the array is non-empty and the majority element always exist in the array.
# Example 1:
# Input: [3,2,3]
# Output: 3
# Example 2:
# Input: [2,2,1,1,1,2,2]
# Output: 2
class Solution:
    def majorityElement(self, nums):
        """Return the element occurring more than len(nums)//2 times.

        After sorting, the majority element is guaranteed to occupy the
        middle index. Uses sorted() instead of list.sort() so the
        caller's list is no longer mutated as a side effect.
        """
        ordered = sorted(nums)
        return ordered[len(ordered) // 2]
# Time: O(n log n)
# Space: O(n) for the sorted copy
from collections import Counter
from typing import List  # fixes NameError: List was used below without import
class Solution:
    def majorityElement(self, nums: List[int]) -> int:
        """Return the majority element using a Counter frequency table.

        Returns -1 when no element exceeds len(nums)//2 occurrences
        (the problem statement guarantees one exists).
        """
        count = Counter(nums)
        threshold = len(nums) // 2
        for value, freq in count.items():
            if freq > threshold:
                return value
        return -1
# Time: O(n)
# Space: O(n)
from typing import List  # fixes NameError: List was used below without import
class Solution:
    def majorityElement(self, nums: List[int]) -> int:
        """Return the majority element via manual dict counting.

        Returns -1 when no element exceeds len(nums)//2 occurrences.
        """
        d = {}
        for i in nums:
            if i not in d:
                d[i] = 1
            else:
                d[i] += 1
        threshold = len(nums) // 2
        for k, v in d.items():
            if v > threshold:
                return k
        return -1
# Time: O(n)
# Space: O(n)
from typing import List  # fixes NameError: List was used below without import
class Solution:
    def majorityElement(self, nums: List[int]) -> int:
        """Return the majority element, counting with dict.get defaults.

        Returns -1 when no element exceeds len(nums)//2 occurrences.
        """
        d = {}
        for i in nums:
            d[i] = d.get(i, 0) + 1
        threshold = len(nums) // 2
        for k, v in d.items():
            if v > threshold:
                return k
        return -1
# Time: O(n)
# Space: O(n)
"noreply@github.com"
] | stungkit.noreply@github.com |
5ac4769b6bea10da8fbb4a97ed3d69902d8d9bd6 | c25e8f2fb5fcd7560f50cf77f49353816ceeffab | /Python for Everybody/Course_4_Using_Databases_with_Python/Assignment_19.py | 17cf366315ad22cae6100361d7264372cb300a74 | [] | no_license | koteshrv/Coursera | c7d75928d095142d2f39013708741ea324dd6464 | 5f37ce922b1f76abcd8582f7dd1c7b674162dd64 | refs/heads/master | 2023-02-09T06:52:31.975940 | 2021-01-05T18:10:23 | 2021-01-05T18:10:23 | 294,464,101 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,003 | py | '''
Retrieving GEOData
Download the code from http://www.py4e.com/code3/geodata.zip - then unzip the file and edit where.data to add an address nearby where you live - don't reveal where you live. Then run the geoload.py to lookup all of the entries in where.data (including the new one) and produce the geodata.sqlite. Then run geodump.py to read the database and produce where.js. You can run the programs and then scroll back to take your screen shots when the program finishes. Then open where.html to visualize the map. Take screen shots as described below. Make sure that your added location shows in all three of your screen shots.
This is a relatively simple assignment. Don't take off points for little mistakes. If they seem to have done the assignment give them full credit. Feel free to make suggestions if there are small mistakes. Please keep your comments positive and useful. If you do not take grading seriously, the instructors may delete your response and you will lose points.
'''
| [
"kkotesh100@gmail.com"
] | kkotesh100@gmail.com |
fc679ef509cb54b6f6526cdeede624b74324deaa | e5f194129752f3f89eed53478416d2c92cde0259 | /.cache/Microsoft/Python Language Server/stubs.v4/RAbd3hOzVRIjl5T9N_Z-WymMnylM5TJUMWmbys-xAPg=/speedups.cpython-38-x86_64-linux-gnu.pyi | 227c530185cbd8d826a94731aef6899a79524680 | [] | no_license | stepin-s/st | 1677fc25cb42c36afd76d2e3a48a1c0a5daf1b93 | b4cf346a446d57210197ee7f6f809cbc0a5b8799 | refs/heads/master | 2023-07-27T17:37:39.268414 | 2021-05-25T12:08:10 | 2021-05-25T12:08:10 | 405,090,749 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | pyi | __doc__ = None
__file__ = '/home/st/.local/lib/python3.8/site-packages/tornado/speedups.cpython-38-x86_64-linux-gnu.so'
__name__ = 'tornado.speedups'
__package__ = 'tornado'
def websocket_mask():
pass
| [
"stpn.s@yandex.ru"
] | stpn.s@yandex.ru |
3af7a3b933bf513f93b14404512dd68ad5d4de48 | a689a72d3699883d7b58bd4ee3103373270bd0d5 | /BOJ/Python/BOJ1020.py | 12f8f79a460103398fcfb7620d39798d82a6ee6c | [] | no_license | Oizys18/Algo | 4670748c850dc9472b6cfb9f828a3ccad9c18981 | 45caafe22a8a8c9134e4ff3b227f5f0be94eefe7 | refs/heads/master | 2022-05-11T08:35:06.812539 | 2022-05-07T01:30:41 | 2022-05-07T01:30:41 | 202,690,024 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | import sys
sys.stdin = open('BOJ1020.txt','r')
from pprint import pprint as pp
time = [int(i) for i in input()]
N = len(time)
cnt = {
1:2,
2:5,
3:5,
4:4,
5:5,
6:6,
7:3,
8:7,
9:5,
0:6
}
def count(time):
    """Total number of seven-segment strokes needed to display `time`."""
    return sum(cnt[digit] for digit in time)
def solve(time):
    """Return the first value >= `time` whose segment count equals N.

    Fix: the recursive result was previously discarded, so every
    non-base call returned None; it is now propagated with `return`.
    NOTE(review): `time + 1` implies a numeric argument, while the module
    builds `time` as a list of digits -- confirm intended input type.
    """
    if count(time) == N:
        return time
    else:
        return solve(time + 1)
"oizys18@gmail.com"
] | oizys18@gmail.com |
38528b4c400b8e661dc7ca3250f4afcfea523742 | f1e9f557c5d724dcabbfa17903de93bb82767e35 | /py_ffmpeg_snapshot_mp4.py | 260b61a4ce66d6d9b5ea7477b98639ad3e9ab092 | [] | no_license | gregsheu/python | e5e9ff83dc0ce90541591e726c940e8a1f71a3d4 | 4a77295d58a522974ee85b201ab99cdbe410fd08 | refs/heads/master | 2023-08-18T08:30:15.611727 | 2023-08-08T06:55:44 | 2023-08-08T06:55:44 | 181,270,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,075 | py | import requests
import urllib
import ffmpeg
import os
import time
import threading
from requests.auth import HTTPDigestAuth
def convert_jpgmp4(ip):
    """Snapshot 4 RTSP camera channels at `ip` six times each, then build
    a short watermarked MP4 per channel from the collected JPEGs.

    Outputs KingSolarman<t>-<ch>.jpg snapshots, ks-gif-<ch>.mp4
    intermediates and KingSolarmanTW-<ch>.mp4 finals in the CWD.
    """
    t = 0
    k = 'KingSolarman'
    # Take 6 rounds of snapshots across the 4 channels.
    while t < 6:
        t = t + 1
        for i in range(1, 5):
            try:
                # NOTE: credentials are hard-coded in the URL; consider
                # moving them to configuration.
                r1 = ffmpeg.input('rtsp://admin:admin12345@%s:554/cam/realmonitor?channel=%s&subtype=0' % (ip, i), rtsp_transport = 'tcp')
                (
                    ffmpeg
                    .output(r1, k + '%s-%s.jpg' % (t, i), vframes=1)
                    .overwrite_output()
                    .run()
                )
            except Exception:  # was a bare `except:`; keep KeyboardInterrupt/SystemExit propagating
                print('error on %s cam' % i)
    # Assemble each channel's snapshots into a watermarked clip.
    for i in range(1, 5):
        (
            ffmpeg.input('./KingSolarman*-%s.jpg' % i, pattern_type='glob', framerate=1)
            .filter('scale', 1280, 720)
            .filter('setsar', sar=1/1)
            .drawtext('King Solarman Inc.', 10, 400, fontcolor='red', fontsize=48, fontfile='/usr/share/fonts/truetype/freefont/FreeSansBold.ttf')
            .output('ks-gif-%s.mp4' % i, t=5, r=29.97)
            .overwrite_output()
            .run()
        )
        v1 = ffmpeg.input('ks-gif-%s.mp4' % i)
        (
            ffmpeg
            .concat(
                v1.setpts('PTS-STARTPTS'),
                v=1,
                a=0,
            )
            .output('KingSolarmanTW-%s.mp4' % i)
            .overwrite_output()
            .run()
        )
def make_tile(ip):
    """Snapshot the 4 camera channels at `ip` and compose them into one
    2x2 tiled JPEG (<k>-tile.jpg).

    Intermediates: <k>-1..4.jpg per-channel snapshots and <k>-t1/-t2.jpg,
    the two vertical 1x2 strips that are finally joined horizontally.
    """
    k = 'KingSolarmanFront'
    for i in range(1, 5):
        try:
            # NOTE: credentials are hard-coded in the URL; consider moving
            # them to configuration.
            r1 = ffmpeg.input('rtsp://admin:admin12345@%s:554/cam/realmonitor?channel=%s&subtype=0' % (ip, i), rtsp_transport = 'tcp')
            (
                ffmpeg
                .output(r1, k + '-%s.jpg' % i, vframes=1)
                .overwrite_output()
                .run()
            )
        except Exception:  # was a bare `except:`; keep KeyboardInterrupt/SystemExit propagating
            print('error on %s cam' % i)
    # (Dead `in1..in4 = None` pre-initializations removed -- they were
    # unconditionally reassigned below.)
    in1 = ffmpeg.input(k + '-1.jpg')
    in2 = ffmpeg.input(k + '-2.jpg')
    in3 = ffmpeg.input(k + '-3.jpg')
    in4 = ffmpeg.input(k + '-4.jpg')
    # Lazy input specs: the -t1/-t2 files are written by the first two
    # runs below before the final concat reads them.
    in5 = ffmpeg.input(k + '-t1.jpg')
    in6 = ffmpeg.input(k + '-t2.jpg')
    (
        ffmpeg
        .concat(
            in1.filter('scale', '1280', '720'),
            in2.filter('scale', '1280', '720'),
        )
        .filter('tile', '1x2')
        .filter('setsar', '16', '9')
        .output(k + '-t1.jpg')
        .overwrite_output()
        .run()
    )
    (
        ffmpeg
        .concat(
            in3.filter('scale', '1280', '720'),
            in4.filter('scale', '1280', '720'),
        )
        .filter('tile', '1x2')
        .filter('setsar', '16', '9')
        .output(k + '-t2.jpg')
        .overwrite_output()
        .run()
    )
    (
        ffmpeg
        .concat(
            in5,
            in6,
        )
        .filter('tile', '2x1')
        .filter('setsar', '16', '9')
        .output(k + '-tile.jpg')
        .overwrite_output()
        .run()
    )
def convert_dav(ip, i, eventstart, eventend):
    # Download the recorded clip for channel `i` between `eventstart` and
    # `eventend` ("YYYY-MM-DD HH:MM:SS" strings) from the DVR at `ip`
    # via its loadfile.cgi endpoint, save it as a .dav file and transcode
    # it to .mp4 with ffmpeg.
    # Build a filesystem-safe timestamp for the output filename.
    newtime = eventstart.replace(' ', '')
    newtime = newtime.replace(':', '')
    payload = {'action': 'startLoad', 'channel': i, 'startTime': eventstart, 'endTime': eventend, 'subtype': '0'}
    param = urllib.parse.urlencode(payload, quote_via=urllib.parse.quote)
    video_url = 'http://%s/cgi-bin/loadfile.cgi?' % ip
    # NOTE(review): DVR credentials are hard-coded here; move to config.
    user = 'admin'
    password = 'admin12345'
    video_resp = requests.get(video_url, params=param, auth=HTTPDigestAuth(user, password), stream=True)
    with open('tripvideo-%s-%s.dav' % (i, newtime), 'wb') as f:
        f.write(video_resp.content)
    tripvideo = 'tripvideo-%s-%s.dav' % (i, newtime)
    r = ffmpeg.input(tripvideo)
    (
        ffmpeg
        # Same base name with the .dav suffix swapped for .mp4.
        .output(r, tripvideo[0:-4]+'.mp4', format='mp4')
        .overwrite_output()
        .run()
    )
def main():
    # Demo driver: compute a short time window around "now", wait for the
    # DVR to finish recording it, then download and convert that window
    # for all four channels.
    ip = '166.149.88.121'
    #ip = '192.168.1.109'
    #make_tile(ip)
    #convert_jpgmp4(ip)
    t = 0
    cur_time = time.time()
    # Window: 2 seconds before now through 5 seconds after now.
    eventstart = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(cur_time-2))
    eventend = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(cur_time+5))
    curtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(cur_time))
    print(curtime)
    print(eventstart)
    print(eventend)
    # Give the DVR time to finish writing the recording before fetching.
    time.sleep(11)
    convert_dav(ip, 1, eventstart, eventend)
    convert_dav(ip, 2, eventstart, eventend)
    convert_dav(ip, 3, eventstart, eventend)
    convert_dav(ip, 4, eventstart, eventend)
    #t1 = threading.Thread(target=get_con_dav, args=(ip, 1, eventstart, eventend,))
    #t1.start()
if __name__ == '__main__':
    main()
| [
"greg@mymacpro.bluefinops.io"
] | greg@mymacpro.bluefinops.io |
034aa04bd4df8e40af2fcc06330674f531b2564b | 1061216c2c33c1ed4ffb33e6211565575957e48f | /python-blueplanet/app/openapi_server/models/realm_domain.py | 5d2b636ee80ac8d5bad11d57f9865d8f0818a6ce | [] | no_license | MSurfer20/test2 | be9532f54839e8f58b60a8e4587348c2810ecdb9 | 13b35d72f33302fa532aea189e8f532272f1f799 | refs/heads/main | 2023-07-03T04:19:57.548080 | 2021-08-11T19:16:42 | 2021-08-11T19:16:42 | 393,920,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,622 | py | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from app.openapi_server.models.base_model_ import Model
from openapi_server import util
class RealmDomain(Model):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, domain: str=None, allow_subdomains: bool=None): # noqa: E501
        """RealmDomain - a model defined in Swagger
        :param domain: The domain of this RealmDomain. # noqa: E501
        :type domain: str
        :param allow_subdomains: The allow_subdomains of this RealmDomain. # noqa: E501
        :type allow_subdomains: bool
        """
        # Type and wire-name metadata consumed by the generated
        # (de)serialization utilities.
        self.swagger_types = {
            'domain': str,
            'allow_subdomains': bool
        }
        self.attribute_map = {
            'domain': 'domain',
            'allow_subdomains': 'allow_subdomains'
        }
        self._domain = domain
        self._allow_subdomains = allow_subdomains
    @classmethod
    def from_dict(cls, dikt) -> 'RealmDomain':
        """Returns the dict as a model
        :param dikt: A dict.
        :type: dict
        :return: The RealmDomain of this RealmDomain. # noqa: E501
        :rtype: RealmDomain
        """
        return util.deserialize_model(dikt, cls)
    @property
    def domain(self) -> str:
        """Gets the domain of this RealmDomain.
        The new allowed domain. # noqa: E501
        :return: The domain of this RealmDomain.
        :rtype: str
        """
        return self._domain
    @domain.setter
    def domain(self, domain: str):
        """Sets the domain of this RealmDomain.
        The new allowed domain. # noqa: E501
        :param domain: The domain of this RealmDomain.
        :type domain: str
        """
        # Note: setters perform no validation; values are stored as-is.
        self._domain = domain
    @property
    def allow_subdomains(self) -> bool:
        """Gets the allow_subdomains of this RealmDomain.
        Whether subdomains are allowed for this domain. # noqa: E501
        :return: The allow_subdomains of this RealmDomain.
        :rtype: bool
        """
        return self._allow_subdomains
    @allow_subdomains.setter
    def allow_subdomains(self, allow_subdomains: bool):
        """Sets the allow_subdomains of this RealmDomain.
        Whether subdomains are allowed for this domain. # noqa: E501
        :param allow_subdomains: The allow_subdomains of this RealmDomain.
        :type allow_subdomains: bool
        """
        # Note: setters perform no validation; values are stored as-is.
        self._allow_subdomains = allow_subdomains
| [
"suyash.mathur@research.iiit.ac.in"
] | suyash.mathur@research.iiit.ac.in |
fb9d4914e6c20b05a500c61aaa95e60cb927b765 | 71dea7d99e3619f133826a134f8d4bb644fa3f91 | /libs/groupdocs_conversion_cloud/models/pdf_convert_options.py | 53adb7427fa4bdedd75a04a4b8becbd023e6bdca | [
"MIT"
] | permissive | rocketbot-cl/pdf2word | cd26aa77a2e38efaebd2180e8b097a4e9d947950 | 794d929651673155a96330b8b45d0f7c69856076 | refs/heads/master | 2022-12-30T16:16:12.908538 | 2022-12-12T22:08:08 | 2022-12-12T22:08:08 | 230,915,875 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 41,188 | py | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="PdfConvertOptions.py">
# Copyright (c) 2003-2019 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
from groupdocs_conversion_cloud.models import ConvertOptions
class PdfConvertOptions(ConvertOptions):
"""
Options for to PDF conversion
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'width': 'int',
'height': 'int',
'dpi': 'float',
'password': 'str',
'margin_top': 'int',
'margin_bottom': 'int',
'margin_left': 'int',
'margin_right': 'int',
'pdf_format': 'str',
'remove_pdfa_compliance': 'bool',
'zoom': 'int',
'linearize': 'bool',
'link_duplicate_streams': 'bool',
'remove_unused_objects': 'bool',
'remove_unused_streams': 'bool',
'compress_images': 'bool',
'image_quality': 'int',
'unembed_fonts': 'bool',
'grayscale': 'bool',
'center_window': 'bool',
'direction': 'str',
'display_doc_title': 'bool',
'fit_window': 'bool',
'hide_menubar': 'bool',
'hide_tool_bar': 'bool',
'hide_window_ui': 'bool',
'non_full_screen_page_mode': 'str',
'page_layout': 'str',
'page_mode': 'str',
'bookmarks_outline_level': 'int',
'headings_outline_levels': 'int',
'expanded_outline_levels': 'int',
'rotate': 'str',
'watermark_options': 'WatermarkOptions'
}
attribute_map = {
'width': 'Width',
'height': 'Height',
'dpi': 'Dpi',
'password': 'Password',
'margin_top': 'MarginTop',
'margin_bottom': 'MarginBottom',
'margin_left': 'MarginLeft',
'margin_right': 'MarginRight',
'pdf_format': 'PdfFormat',
'remove_pdfa_compliance': 'RemovePdfaCompliance',
'zoom': 'Zoom',
'linearize': 'Linearize',
'link_duplicate_streams': 'LinkDuplicateStreams',
'remove_unused_objects': 'RemoveUnusedObjects',
'remove_unused_streams': 'RemoveUnusedStreams',
'compress_images': 'CompressImages',
'image_quality': 'ImageQuality',
'unembed_fonts': 'UnembedFonts',
'grayscale': 'Grayscale',
'center_window': 'CenterWindow',
'direction': 'Direction',
'display_doc_title': 'DisplayDocTitle',
'fit_window': 'FitWindow',
'hide_menubar': 'HideMenubar',
'hide_tool_bar': 'HideToolBar',
'hide_window_ui': 'HideWindowUI',
'non_full_screen_page_mode': 'NonFullScreenPageMode',
'page_layout': 'PageLayout',
'page_mode': 'PageMode',
'bookmarks_outline_level': 'BookmarksOutlineLevel',
'headings_outline_levels': 'HeadingsOutlineLevels',
'expanded_outline_levels': 'ExpandedOutlineLevels',
'rotate': 'Rotate',
'watermark_options': 'WatermarkOptions'
}
def __init__(self, width=None, height=None, dpi=None, password=None, margin_top=None, margin_bottom=None, margin_left=None, margin_right=None, pdf_format=None, remove_pdfa_compliance=None, zoom=None, linearize=None, link_duplicate_streams=None, remove_unused_objects=None, remove_unused_streams=None, compress_images=None, image_quality=None, unembed_fonts=None, grayscale=None, center_window=None, direction=None, display_doc_title=None, fit_window=None, hide_menubar=None, hide_tool_bar=None, hide_window_ui=None, non_full_screen_page_mode=None, page_layout=None, page_mode=None, bookmarks_outline_level=None, headings_outline_levels=None, expanded_outline_levels=None, rotate=None, watermark_options=None, **kwargs): # noqa: E501
"""Initializes new instance of PdfConvertOptions""" # noqa: E501
self._width = None
self._height = None
self._dpi = None
self._password = None
self._margin_top = None
self._margin_bottom = None
self._margin_left = None
self._margin_right = None
self._pdf_format = None
self._remove_pdfa_compliance = None
self._zoom = None
self._linearize = None
self._link_duplicate_streams = None
self._remove_unused_objects = None
self._remove_unused_streams = None
self._compress_images = None
self._image_quality = None
self._unembed_fonts = None
self._grayscale = None
self._center_window = None
self._direction = None
self._display_doc_title = None
self._fit_window = None
self._hide_menubar = None
self._hide_tool_bar = None
self._hide_window_ui = None
self._non_full_screen_page_mode = None
self._page_layout = None
self._page_mode = None
self._bookmarks_outline_level = None
self._headings_outline_levels = None
self._expanded_outline_levels = None
self._rotate = None
self._watermark_options = None
if width is not None:
self.width = width
if height is not None:
self.height = height
if dpi is not None:
self.dpi = dpi
if password is not None:
self.password = password
if margin_top is not None:
self.margin_top = margin_top
if margin_bottom is not None:
self.margin_bottom = margin_bottom
if margin_left is not None:
self.margin_left = margin_left
if margin_right is not None:
self.margin_right = margin_right
if pdf_format is not None:
self.pdf_format = pdf_format
if remove_pdfa_compliance is not None:
self.remove_pdfa_compliance = remove_pdfa_compliance
if zoom is not None:
self.zoom = zoom
if linearize is not None:
self.linearize = linearize
if link_duplicate_streams is not None:
self.link_duplicate_streams = link_duplicate_streams
if remove_unused_objects is not None:
self.remove_unused_objects = remove_unused_objects
if remove_unused_streams is not None:
self.remove_unused_streams = remove_unused_streams
if compress_images is not None:
self.compress_images = compress_images
if image_quality is not None:
self.image_quality = image_quality
if unembed_fonts is not None:
self.unembed_fonts = unembed_fonts
if grayscale is not None:
self.grayscale = grayscale
if center_window is not None:
self.center_window = center_window
if direction is not None:
self.direction = direction
if display_doc_title is not None:
self.display_doc_title = display_doc_title
if fit_window is not None:
self.fit_window = fit_window
if hide_menubar is not None:
self.hide_menubar = hide_menubar
if hide_tool_bar is not None:
self.hide_tool_bar = hide_tool_bar
if hide_window_ui is not None:
self.hide_window_ui = hide_window_ui
if non_full_screen_page_mode is not None:
self.non_full_screen_page_mode = non_full_screen_page_mode
if page_layout is not None:
self.page_layout = page_layout
if page_mode is not None:
self.page_mode = page_mode
if bookmarks_outline_level is not None:
self.bookmarks_outline_level = bookmarks_outline_level
if headings_outline_levels is not None:
self.headings_outline_levels = headings_outline_levels
if expanded_outline_levels is not None:
self.expanded_outline_levels = expanded_outline_levels
if rotate is not None:
self.rotate = rotate
if watermark_options is not None:
self.watermark_options = watermark_options
base = super(PdfConvertOptions, self)
base.__init__(**kwargs)
self.swagger_types.update(base.swagger_types)
self.attribute_map.update(base.attribute_map)
@property
def width(self):
"""
Gets the width. # noqa: E501
Desired page width in pixels after conversion # noqa: E501
:return: The width. # noqa: E501
:rtype: int
"""
return self._width
@width.setter
def width(self, width):
"""
Sets the width.
Desired page width in pixels after conversion # noqa: E501
:param width: The width. # noqa: E501
:type: int
"""
if width is None:
raise ValueError("Invalid value for `width`, must not be `None`") # noqa: E501
self._width = width
@property
def height(self):
"""
Gets the height. # noqa: E501
Desired page height in pixels after conversion # noqa: E501
:return: The height. # noqa: E501
:rtype: int
"""
return self._height
@height.setter
def height(self, height):
"""
Sets the height.
Desired page height in pixels after conversion # noqa: E501
:param height: The height. # noqa: E501
:type: int
"""
if height is None:
raise ValueError("Invalid value for `height`, must not be `None`") # noqa: E501
self._height = height
@property
def dpi(self):
"""
Gets the dpi. # noqa: E501
Desired page DPI after conversion. The default resolution is: 96dpi # noqa: E501
:return: The dpi. # noqa: E501
:rtype: float
"""
return self._dpi
@dpi.setter
def dpi(self, dpi):
"""
Sets the dpi.
Desired page DPI after conversion. The default resolution is: 96dpi # noqa: E501
:param dpi: The dpi. # noqa: E501
:type: float
"""
if dpi is None:
raise ValueError("Invalid value for `dpi`, must not be `None`") # noqa: E501
self._dpi = dpi
@property
def password(self):
"""
Gets the password. # noqa: E501
Set this property if you want to protect the converted document with a password # noqa: E501
:return: The password. # noqa: E501
:rtype: str
"""
return self._password
@password.setter
def password(self, password):
"""
Sets the password.
Set this property if you want to protect the converted document with a password # noqa: E501
:param password: The password. # noqa: E501
:type: str
"""
self._password = password
@property
def margin_top(self):
"""
Gets the margin_top. # noqa: E501
Desired page top margin in pixels after conversion # noqa: E501
:return: The margin_top. # noqa: E501
:rtype: int
"""
return self._margin_top
@margin_top.setter
def margin_top(self, margin_top):
"""
Sets the margin_top.
Desired page top margin in pixels after conversion # noqa: E501
:param margin_top: The margin_top. # noqa: E501
:type: int
"""
if margin_top is None:
raise ValueError("Invalid value for `margin_top`, must not be `None`") # noqa: E501
self._margin_top = margin_top
@property
def margin_bottom(self):
"""
Gets the margin_bottom. # noqa: E501
Desired page bottom margin in pixels after conversion # noqa: E501
:return: The margin_bottom. # noqa: E501
:rtype: int
"""
return self._margin_bottom
@margin_bottom.setter
def margin_bottom(self, margin_bottom):
"""
Sets the margin_bottom.
Desired page bottom margin in pixels after conversion # noqa: E501
:param margin_bottom: The margin_bottom. # noqa: E501
:type: int
"""
if margin_bottom is None:
raise ValueError("Invalid value for `margin_bottom`, must not be `None`") # noqa: E501
self._margin_bottom = margin_bottom
@property
def margin_left(self):
"""
Gets the margin_left. # noqa: E501
Desired page left margin in pixels after conversion # noqa: E501
:return: The margin_left. # noqa: E501
:rtype: int
"""
return self._margin_left
@margin_left.setter
def margin_left(self, margin_left):
"""
Sets the margin_left.
Desired page left margin in pixels after conversion # noqa: E501
:param margin_left: The margin_left. # noqa: E501
:type: int
"""
if margin_left is None:
raise ValueError("Invalid value for `margin_left`, must not be `None`") # noqa: E501
self._margin_left = margin_left
@property
def margin_right(self):
"""
Gets the margin_right. # noqa: E501
Desired page right margin in pixels after conversion # noqa: E501
:return: The margin_right. # noqa: E501
:rtype: int
"""
return self._margin_right
@margin_right.setter
def margin_right(self, margin_right):
"""
Sets the margin_right.
Desired page right margin in pixels after conversion # noqa: E501
:param margin_right: The margin_right. # noqa: E501
:type: int
"""
if margin_right is None:
raise ValueError("Invalid value for `margin_right`, must not be `None`") # noqa: E501
self._margin_right = margin_right
@property
def pdf_format(self):
"""
Gets the pdf_format. # noqa: E501
Set the pdf format of the converted document. # noqa: E501
:return: The pdf_format. # noqa: E501
:rtype: str
"""
return self._pdf_format
@pdf_format.setter
def pdf_format(self, pdf_format):
"""
Sets the pdf_format.
Set the pdf format of the converted document. # noqa: E501
:param pdf_format: The pdf_format. # noqa: E501
:type: str
"""
if pdf_format is None:
raise ValueError("Invalid value for `pdf_format`, must not be `None`") # noqa: E501
allowed_values = ["Default", "PdfA_1A", "PdfA_1B", "PdfA_2A", "PdfA_3A", "PdfA_2B", "PdfA_2U", "PdfA_3B", "PdfA_3U", "v1_3", "v1_4", "v1_5", "v1_6", "v1_7", "PdfX_1A", "PdfX3"] # noqa: E501
if not pdf_format.isdigit():
if pdf_format not in allowed_values:
raise ValueError(
"Invalid value for `pdf_format` ({0}), must be one of {1}" # noqa: E501
.format(pdf_format, allowed_values))
self._pdf_format = pdf_format
else:
self._pdf_format = allowed_values[int(pdf_format) if six.PY3 else long(pdf_format)]
@property
def remove_pdfa_compliance(self):
"""
Gets the remove_pdfa_compliance. # noqa: E501
Remove Pdf-A Compliance # noqa: E501
:return: The remove_pdfa_compliance. # noqa: E501
:rtype: bool
"""
return self._remove_pdfa_compliance
@remove_pdfa_compliance.setter
def remove_pdfa_compliance(self, remove_pdfa_compliance):
"""
Sets the remove_pdfa_compliance.
Remove Pdf-A Compliance # noqa: E501
:param remove_pdfa_compliance: The remove_pdfa_compliance. # noqa: E501
:type: bool
"""
if remove_pdfa_compliance is None:
raise ValueError("Invalid value for `remove_pdfa_compliance`, must not be `None`") # noqa: E501
self._remove_pdfa_compliance = remove_pdfa_compliance
@property
def zoom(self):
"""
Gets the zoom. # noqa: E501
Specifies the zoom level in percentage. Default is 100. # noqa: E501
:return: The zoom. # noqa: E501
:rtype: int
"""
return self._zoom
@zoom.setter
def zoom(self, zoom):
"""
Sets the zoom.
Specifies the zoom level in percentage. Default is 100. # noqa: E501
:param zoom: The zoom. # noqa: E501
:type: int
"""
if zoom is None:
raise ValueError("Invalid value for `zoom`, must not be `None`") # noqa: E501
self._zoom = zoom
@property
def linearize(self):
"""
Gets the linearize. # noqa: E501
Linearize PDF Document for the Web # noqa: E501
:return: The linearize. # noqa: E501
:rtype: bool
"""
return self._linearize
@linearize.setter
def linearize(self, linearize):
"""
Sets the linearize.
Linearize PDF Document for the Web # noqa: E501
:param linearize: The linearize. # noqa: E501
:type: bool
"""
if linearize is None:
raise ValueError("Invalid value for `linearize`, must not be `None`") # noqa: E501
self._linearize = linearize
@property
def link_duplicate_streams(self):
"""
Gets the link_duplicate_streams. # noqa: E501
Link duplicate streams # noqa: E501
:return: The link_duplicate_streams. # noqa: E501
:rtype: bool
"""
return self._link_duplicate_streams
@link_duplicate_streams.setter
def link_duplicate_streams(self, link_duplicate_streams):
"""
Sets the link_duplicate_streams.
Link duplicate streams # noqa: E501
:param link_duplicate_streams: The link_duplicate_streams. # noqa: E501
:type: bool
"""
if link_duplicate_streams is None:
raise ValueError("Invalid value for `link_duplicate_streams`, must not be `None`") # noqa: E501
self._link_duplicate_streams = link_duplicate_streams
@property
def remove_unused_objects(self):
"""
Gets the remove_unused_objects. # noqa: E501
Remove unused objects # noqa: E501
:return: The remove_unused_objects. # noqa: E501
:rtype: bool
"""
return self._remove_unused_objects
@remove_unused_objects.setter
def remove_unused_objects(self, remove_unused_objects):
"""
Sets the remove_unused_objects.
Remove unused objects # noqa: E501
:param remove_unused_objects: The remove_unused_objects. # noqa: E501
:type: bool
"""
if remove_unused_objects is None:
raise ValueError("Invalid value for `remove_unused_objects`, must not be `None`") # noqa: E501
self._remove_unused_objects = remove_unused_objects
@property
def remove_unused_streams(self):
"""
Gets the remove_unused_streams. # noqa: E501
Remove unused streams # noqa: E501
:return: The remove_unused_streams. # noqa: E501
:rtype: bool
"""
return self._remove_unused_streams
@remove_unused_streams.setter
def remove_unused_streams(self, remove_unused_streams):
"""
Sets the remove_unused_streams.
Remove unused streams # noqa: E501
:param remove_unused_streams: The remove_unused_streams. # noqa: E501
:type: bool
"""
if remove_unused_streams is None:
raise ValueError("Invalid value for `remove_unused_streams`, must not be `None`") # noqa: E501
self._remove_unused_streams = remove_unused_streams
@property
def compress_images(self):
"""
Gets the compress_images. # noqa: E501
If CompressImages set to true, all images in the document are recompressed. The compression is defined by the ImageQuality property. # noqa: E501
:return: The compress_images. # noqa: E501
:rtype: bool
"""
return self._compress_images
@compress_images.setter
def compress_images(self, compress_images):
"""
Sets the compress_images.
If CompressImages set to true, all images in the document are recompressed. The compression is defined by the ImageQuality property. # noqa: E501
:param compress_images: The compress_images. # noqa: E501
:type: bool
"""
if compress_images is None:
raise ValueError("Invalid value for `compress_images`, must not be `None`") # noqa: E501
self._compress_images = compress_images
@property
def image_quality(self):
"""
Gets the image_quality. # noqa: E501
Value in percent where 100% is unchanged quality and image size. To decrease the image size, use ImageQuality less than 100 # noqa: E501
:return: The image_quality. # noqa: E501
:rtype: int
"""
return self._image_quality
@image_quality.setter
def image_quality(self, image_quality):
"""
Sets the image_quality.
Value in percent where 100% is unchanged quality and image size. To decrease the image size, use ImageQuality less than 100 # noqa: E501
:param image_quality: The image_quality. # noqa: E501
:type: int
"""
if image_quality is None:
raise ValueError("Invalid value for `image_quality`, must not be `None`") # noqa: E501
self._image_quality = image_quality
@property
def unembed_fonts(self):
"""
Gets the unembed_fonts. # noqa: E501
Make fonts not embedded if set to true # noqa: E501
:return: The unembed_fonts. # noqa: E501
:rtype: bool
"""
return self._unembed_fonts
@unembed_fonts.setter
def unembed_fonts(self, unembed_fonts):
"""
Sets the unembed_fonts.
Make fonts not embedded if set to true # noqa: E501
:param unembed_fonts: The unembed_fonts. # noqa: E501
:type: bool
"""
if unembed_fonts is None:
raise ValueError("Invalid value for `unembed_fonts`, must not be `None`") # noqa: E501
self._unembed_fonts = unembed_fonts
@property
def grayscale(self):
"""
Gets the grayscale. # noqa: E501
Convert a PDF from RGB colorspace to Grayscale # noqa: E501
:return: The grayscale. # noqa: E501
:rtype: bool
"""
return self._grayscale
@grayscale.setter
def grayscale(self, grayscale):
"""
Sets the grayscale.
Convert a PDF from RGB colorspace to Grayscale # noqa: E501
:param grayscale: The grayscale. # noqa: E501
:type: bool
"""
if grayscale is None:
raise ValueError("Invalid value for `grayscale`, must not be `None`") # noqa: E501
self._grayscale = grayscale
@property
def center_window(self):
"""
Gets the center_window. # noqa: E501
Specify whether position of the document's window will be centered on the screen. Default: false. # noqa: E501
:return: The center_window. # noqa: E501
:rtype: bool
"""
return self._center_window
@center_window.setter
def center_window(self, center_window):
"""
Sets the center_window.
Specify whether position of the document's window will be centered on the screen. Default: false. # noqa: E501
:param center_window: The center_window. # noqa: E501
:type: bool
"""
if center_window is None:
raise ValueError("Invalid value for `center_window`, must not be `None`") # noqa: E501
self._center_window = center_window
@property
def direction(self):
"""
Gets the direction. # noqa: E501
Sets reading order of text: L2R (left to right) or R2L (right to left). Default: L2R. # noqa: E501
:return: The direction. # noqa: E501
:rtype: str
"""
return self._direction
@direction.setter
def direction(self, direction):
"""
Sets the direction.
Sets reading order of text: L2R (left to right) or R2L (right to left). Default: L2R. # noqa: E501
:param direction: The direction. # noqa: E501
:type: str
"""
if direction is None:
raise ValueError("Invalid value for `direction`, must not be `None`") # noqa: E501
allowed_values = ["L2R", "R2L"] # noqa: E501
if not direction.isdigit():
if direction not in allowed_values:
raise ValueError(
"Invalid value for `direction` ({0}), must be one of {1}" # noqa: E501
.format(direction, allowed_values))
self._direction = direction
else:
self._direction = allowed_values[int(direction) if six.PY3 else long(direction)]
@property
def display_doc_title(self):
"""
Gets the display_doc_title. # noqa: E501
Specifying whether document's window title bar should display document title. Default: false. # noqa: E501
:return: The display_doc_title. # noqa: E501
:rtype: bool
"""
return self._display_doc_title
@display_doc_title.setter
def display_doc_title(self, display_doc_title):
"""
Sets the display_doc_title.
Specifying whether document's window title bar should display document title. Default: false. # noqa: E501
:param display_doc_title: The display_doc_title. # noqa: E501
:type: bool
"""
if display_doc_title is None:
raise ValueError("Invalid value for `display_doc_title`, must not be `None`") # noqa: E501
self._display_doc_title = display_doc_title
@property
def fit_window(self):
"""
Gets the fit_window. # noqa: E501
Specify whether document window must be resized to fit the first displayed page. Default: false. # noqa: E501
:return: The fit_window. # noqa: E501
:rtype: bool
"""
return self._fit_window
@fit_window.setter
def fit_window(self, fit_window):
"""
Sets the fit_window.
Specify whether document window must be resized to fit the first displayed page. Default: false. # noqa: E501
:param fit_window: The fit_window. # noqa: E501
:type: bool
"""
if fit_window is None:
raise ValueError("Invalid value for `fit_window`, must not be `None`") # noqa: E501
self._fit_window = fit_window
@property
def hide_menubar(self):
"""
Gets the hide_menubar. # noqa: E501
Specify whether menu bar should be hidden when document is active. Default: false. # noqa: E501
:return: The hide_menubar. # noqa: E501
:rtype: bool
"""
return self._hide_menubar
@hide_menubar.setter
def hide_menubar(self, hide_menubar):
"""
Sets the hide_menubar.
Specify whether menu bar should be hidden when document is active. Default: false. # noqa: E501
:param hide_menubar: The hide_menubar. # noqa: E501
:type: bool
"""
if hide_menubar is None:
raise ValueError("Invalid value for `hide_menubar`, must not be `None`") # noqa: E501
self._hide_menubar = hide_menubar
@property
def hide_tool_bar(self):
"""
Gets the hide_tool_bar. # noqa: E501
Specifying whether toolbar should be hidden when document is active. Default: false. # noqa: E501
:return: The hide_tool_bar. # noqa: E501
:rtype: bool
"""
return self._hide_tool_bar
@hide_tool_bar.setter
def hide_tool_bar(self, hide_tool_bar):
"""
Sets the hide_tool_bar.
Specifying whether toolbar should be hidden when document is active. Default: false. # noqa: E501
:param hide_tool_bar: The hide_tool_bar. # noqa: E501
:type: bool
"""
if hide_tool_bar is None:
raise ValueError("Invalid value for `hide_tool_bar`, must not be `None`") # noqa: E501
self._hide_tool_bar = hide_tool_bar
@property
def hide_window_ui(self):
"""
Gets the hide_window_ui. # noqa: E501
Specify whether user interface elements should be hidden when document is active. Default: false. # noqa: E501
:return: The hide_window_ui. # noqa: E501
:rtype: bool
"""
return self._hide_window_ui
@hide_window_ui.setter
def hide_window_ui(self, hide_window_ui):
"""
Sets the hide_window_ui.
Specify whether user interface elements should be hidden when document is active. Default: false. # noqa: E501
:param hide_window_ui: The hide_window_ui. # noqa: E501
:type: bool
"""
if hide_window_ui is None:
raise ValueError("Invalid value for `hide_window_ui`, must not be `None`") # noqa: E501
self._hide_window_ui = hide_window_ui
@property
def non_full_screen_page_mode(self):
"""
Gets the non_full_screen_page_mode. # noqa: E501
Sets page mode, specifying how to display the document on exiting full-screen mode. # noqa: E501
:return: The non_full_screen_page_mode. # noqa: E501
:rtype: str
"""
return self._non_full_screen_page_mode
@non_full_screen_page_mode.setter
def non_full_screen_page_mode(self, non_full_screen_page_mode):
"""
Sets the non_full_screen_page_mode.
Sets page mode, specifying how to display the document on exiting full-screen mode. # noqa: E501
:param non_full_screen_page_mode: The non_full_screen_page_mode. # noqa: E501
:type: str
"""
if non_full_screen_page_mode is None:
raise ValueError("Invalid value for `non_full_screen_page_mode`, must not be `None`") # noqa: E501
allowed_values = ["UseNone", "UseOutlines", "UseThumbs", "FullScreen", "UseOC", "UseAttachments"] # noqa: E501
if not non_full_screen_page_mode.isdigit():
if non_full_screen_page_mode not in allowed_values:
raise ValueError(
"Invalid value for `non_full_screen_page_mode` ({0}), must be one of {1}" # noqa: E501
.format(non_full_screen_page_mode, allowed_values))
self._non_full_screen_page_mode = non_full_screen_page_mode
else:
self._non_full_screen_page_mode = allowed_values[int(non_full_screen_page_mode) if six.PY3 else long(non_full_screen_page_mode)]
@property
def page_layout(self):
"""
Gets the page_layout. # noqa: E501
Sets page layout which shall be used when the document is opened. # noqa: E501
:return: The page_layout. # noqa: E501
:rtype: str
"""
return self._page_layout
@page_layout.setter
def page_layout(self, page_layout):
"""
Sets the page_layout.
Sets page layout which shall be used when the document is opened. # noqa: E501
:param page_layout: The page_layout. # noqa: E501
:type: str
"""
if page_layout is None:
raise ValueError("Invalid value for `page_layout`, must not be `None`") # noqa: E501
allowed_values = ["Default", "SinglePage", "OneColumn", "TwoColumnLeft", "TwoColumnRight", "TwoPageLeft", "TwoPageRight"] # noqa: E501
if not page_layout.isdigit():
if page_layout not in allowed_values:
raise ValueError(
"Invalid value for `page_layout` ({0}), must be one of {1}" # noqa: E501
.format(page_layout, allowed_values))
self._page_layout = page_layout
else:
self._page_layout = allowed_values[int(page_layout) if six.PY3 else long(page_layout)]
@property
def page_mode(self):
"""
Gets the page_mode. # noqa: E501
Sets page mode, specifying how document should be displayed when opened. # noqa: E501
:return: The page_mode. # noqa: E501
:rtype: str
"""
return self._page_mode
@page_mode.setter
def page_mode(self, page_mode):
"""
Sets the page_mode.
Sets page mode, specifying how document should be displayed when opened. # noqa: E501
:param page_mode: The page_mode. # noqa: E501
:type: str
"""
if page_mode is None:
raise ValueError("Invalid value for `page_mode`, must not be `None`") # noqa: E501
allowed_values = ["UseNone", "UseOutlines", "UseThumbs", "FullScreen", "UseOC", "UseAttachments"] # noqa: E501
if not page_mode.isdigit():
if page_mode not in allowed_values:
raise ValueError(
"Invalid value for `page_mode` ({0}), must be one of {1}" # noqa: E501
.format(page_mode, allowed_values))
self._page_mode = page_mode
else:
self._page_mode = allowed_values[int(page_mode) if six.PY3 else long(page_mode)]
@property
def bookmarks_outline_level(self):
"""
Gets the bookmarks_outline_level. # noqa: E501
Specifies the default level in the document outline at which to display Word bookmarks. Default is 0. Valid range is 0 to 9. # noqa: E501
:return: The bookmarks_outline_level. # noqa: E501
:rtype: int
"""
return self._bookmarks_outline_level
@bookmarks_outline_level.setter
def bookmarks_outline_level(self, bookmarks_outline_level):
"""
Sets the bookmarks_outline_level.
Specifies the default level in the document outline at which to display Word bookmarks. Default is 0. Valid range is 0 to 9. # noqa: E501
:param bookmarks_outline_level: The bookmarks_outline_level. # noqa: E501
:type: int
"""
if bookmarks_outline_level is None:
raise ValueError("Invalid value for `bookmarks_outline_level`, must not be `None`") # noqa: E501
self._bookmarks_outline_level = bookmarks_outline_level
@property
def headings_outline_levels(self):
"""
Gets the headings_outline_levels. # noqa: E501
Specifies how many levels of headings (paragraphs formatted with the Heading styles) to include in the document outline. Default is 0. Valid range is 0 to 9. # noqa: E501
:return: The headings_outline_levels. # noqa: E501
:rtype: int
"""
return self._headings_outline_levels
@headings_outline_levels.setter
def headings_outline_levels(self, headings_outline_levels):
"""
Sets the headings_outline_levels.
Specifies how many levels of headings (paragraphs formatted with the Heading styles) to include in the document outline. Default is 0. Valid range is 0 to 9. # noqa: E501
:param headings_outline_levels: The headings_outline_levels. # noqa: E501
:type: int
"""
if headings_outline_levels is None:
raise ValueError("Invalid value for `headings_outline_levels`, must not be `None`") # noqa: E501
self._headings_outline_levels = headings_outline_levels
@property
def expanded_outline_levels(self):
"""
Gets the expanded_outline_levels. # noqa: E501
Specifies how many levels in the document outline to show expanded when the file is viewed. Default is 0. Valid range is 0 to 9. Note that this options will not work when saving to XPS. # noqa: E501
:return: The expanded_outline_levels. # noqa: E501
:rtype: int
"""
return self._expanded_outline_levels
@expanded_outline_levels.setter
def expanded_outline_levels(self, expanded_outline_levels):
"""
Sets the expanded_outline_levels.
Specifies how many levels in the document outline to show expanded when the file is viewed. Default is 0. Valid range is 0 to 9. Note that this options will not work when saving to XPS. # noqa: E501
:param expanded_outline_levels: The expanded_outline_levels. # noqa: E501
:type: int
"""
if expanded_outline_levels is None:
raise ValueError("Invalid value for `expanded_outline_levels`, must not be `None`") # noqa: E501
self._expanded_outline_levels = expanded_outline_levels
@property
def rotate(self):
"""
Gets the rotate. # noqa: E501
Rotate page # noqa: E501
:return: The rotate. # noqa: E501
:rtype: str
"""
return self._rotate
@rotate.setter
def rotate(self, rotate):
"""
Sets the rotate.
Rotate page # noqa: E501
:param rotate: The rotate. # noqa: E501
:type: str
"""
if rotate is None:
raise ValueError("Invalid value for `rotate`, must not be `None`") # noqa: E501
allowed_values = ["None", "On90", "On180", "On270"] # noqa: E501
if not rotate.isdigit():
if rotate not in allowed_values:
raise ValueError(
"Invalid value for `rotate` ({0}), must be one of {1}" # noqa: E501
.format(rotate, allowed_values))
self._rotate = rotate
else:
self._rotate = allowed_values[int(rotate) if six.PY3 else long(rotate)]
@property
def watermark_options(self):
"""
Gets the watermark_options. # noqa: E501
Watermark specific options # noqa: E501
:return: The watermark_options. # noqa: E501
:rtype: WatermarkOptions
"""
return self._watermark_options
@watermark_options.setter
def watermark_options(self, watermark_options):
"""
Sets the watermark_options.
Watermark specific options # noqa: E501
:param watermark_options: The watermark_options. # noqa: E501
:type: WatermarkOptions
"""
self._watermark_options = watermark_options
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        # Delegates to to_str(), so repr() shows the pretty-printed model dict.
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PdfConvertOptions):
            # Other types compare unequal (returns False, not NotImplemented,
            # so Python will not try the reflected comparison).
            return False
        # Attribute-wise comparison via the instance dicts.
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        # Explicit negation of __eq__; required under Python 2 (this file
        # targets both, see the six usage), where __ne__ is not derived.
        return not self == other
| [
"marce.rotten@gmail.com"
] | marce.rotten@gmail.com |
853844b8902491afe57cebbbb509d1c6a63f6d96 | 8a1144dd38388992c7e35a4cc84002e381f2cf1f | /python/django_fundamentals/login_and_registration/apps/login_registration/migrations/0001_initial.py | 9ea1b5fcdbc6512bea58f815ae55408d3fa91aa6 | [] | no_license | vin792/dojo_assignments | 18472e868610bacbd0b5141a5322628f4afefb5b | 449b752f92df224285bfd5d03901a3692a98562e | refs/heads/master | 2021-01-20T00:20:09.896742 | 2017-05-26T17:37:09 | 2017-05-26T17:37:09 | 82,735,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 20:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration of the login_registration app: creates the User table.
    initial = True
    # First migration, so there is nothing to depend on.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                # Auto-increment primary key added explicitly by the generator.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=255)),
                ('last_name', models.CharField(max_length=255)),
                ('email', models.EmailField(max_length=254)),
                # presumably stores a password hash, not plain text (field
                # name suggests so) - confirm against the app's views.
                ('hash_password', models.CharField(max_length=255)),
                # Set once on insert / refreshed on every save, respectively.
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| [
"vin792@gmail.com"
] | vin792@gmail.com |
912291a36038721951374e02ac6c78b525f90162 | 6968c7f9d2b20b5296663829f99a27d184a59fc1 | /autodisc/autodisc/test/explorers/test_goalspacedensityexplorer.py | fabe579ee7322405d892713d4320675ee74cfeb5 | [
"Apache-2.0",
"MIT"
] | permissive | flowersteam/automated_discovery_of_lenia_patterns | d42dff37323d51732571b33845c0562d844f498f | 97cc7cde2120fa95225d1e470e00b8aa8c034e97 | refs/heads/master | 2020-06-29T07:08:58.404541 | 2020-05-14T07:37:10 | 2020-05-14T07:37:10 | 200,470,902 | 13 | 2 | null | null | null | null | UTF-8 | Python | false | false | 12,729 | py | import autodisc as ad
import numpy as np
import os
from autodisc.explorers.goalspacedensityexplorer import GoalSpaceDensityExplorer
def test_statistic_goalspace():
    """Smoke-test GoalSpaceDensityExplorer with a statistic-based goal space.

    Runs 5 explorations on the Lenia system (the first 2 are random
    initializations) and checks that 5 runs are recorded and that each
    reached goal is a 2-vector (one entry per configured statistic).
    """
    # use lenia as test system
    system = ad.systems.Lenia()
    config = GoalSpaceDensityExplorer.default_config()
    config.seed = 1
    config.num_of_random_initialization = 2
    # Parameter 2: R
    parameter = ad.Config()
    parameter.name = 'R'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 2, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 2, 'max': 20}
    config.run_parameters.append(parameter)
    # Parameter 3: T
    parameter = ad.Config()
    parameter.name = 'T'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 1, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 1, 'max': 20}
    config.run_parameters.append(parameter)
    # which statistics are used as a goal space
    statistic_representation_config = ad.representations.static.StatisticRepresentation.default_config()
    statistic_representation_config.statistics = ['activation_mass_mean',
                                                  'activation_mass_std']
    config.goal_space_representation = ad.representations.static.StatisticRepresentation(config=statistic_representation_config)
    # how are goals sampled
    config.source_parameter_selection.type = 'random'
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    assert np.shape(explorer.reached_goal_library) == (5, 2)
def goal_space_func_with_config(observations, statistics, config):
    """Stub goal-space mapping used by the explorer tests (config variant).

    Verifies that the explorer forwards its arguments correctly, then maps
    every observation to the fixed goal point [1, 2, 3].
    """
    # The configured extra value must be passed through unchanged.
    assert config == 'test'
    # Observations and statistics must carry the expected keys.
    assert 'states' in observations
    assert 'timepoints' in observations
    assert 'activation_mass' in statistics
    # Constant goal so the caller can assert on the reached-goal library.
    return [1, 2, 3]
def goal_space_func_without_config(observations, statistics):
    """Stub goal-space mapping used by the explorer tests (no-config variant).

    Verifies that the explorer forwards its arguments correctly, then maps
    every observation to the fixed goal point [10, 20, 30].
    """
    # Observations and statistics must carry the expected keys.
    assert 'states' in observations
    assert 'timepoints' in observations
    assert 'activation_mass' in statistics
    # Constant goal so the caller can assert on the reached-goal library.
    return [10, 20, 30]
def test_function_goalspace():
    """GoalSpaceDensityExplorer with a user-supplied goal-space function.

    Exercises the three ways a FunctionRepresentation can be configured:
    a plain callable, a callable plus an extra `config` argument, and a
    dotted-path string. Each run must record 5 entries whose reached goals
    equal the stub function's constant return value.
    """
    # use lenia as test system
    system = ad.systems.Lenia()
    config = GoalSpaceDensityExplorer.default_config()
    config.seed = 1
    config.num_of_random_initialization = 2
    # Parameter 2: R
    parameter = ad.Config()
    parameter.name = 'R'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 2, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 2, 'max': 20}
    config.run_parameters.append(parameter)
    # Parameter 3: T
    parameter = ad.Config()
    parameter.name = 'T'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 1, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 1, 'max': 20}
    config.run_parameters.append(parameter)
    # which statistics are used as a goal space
    goal_space_config = ad.representations.FunctionRepresentation.default_config()
    goal_space_config.function = goal_space_func_without_config
    config.goal_space_representation = ad.representations.FunctionRepresentation(config=goal_space_config)
    # how are goals sampled
    config.source_parameter_selection.type = 'random'
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    # every reached goal must be the stub's constant [10, 20, 30]
    assert np.all(explorer.reached_goal_library == [[10, 20, 30], [10, 20, 30], [10, 20, 30], [10, 20, 30], [10, 20, 30]])
    del explorer
    #########################################################
    # with config
    config.goal_space_representation.config.function = goal_space_func_with_config
    config.goal_space_representation.config.config = 'test'
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    assert np.all(explorer.reached_goal_library == [[1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3]])
    del explorer
    #########################################################
    # function as string
    # NOTE(review): the dotted path names test_goalspaceexplorer, not this
    # module (test_goalspacedensityexplorer) - confirm it still resolves to
    # an equivalent goal_space_func_with_config.
    config.goal_space_representation.config.function = 'autodisc.test.explorers.test_goalspaceexplorer.goal_space_func_with_config'
    config.goal_space_representation.config.config = 'test'
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    assert np.all(explorer.reached_goal_library == [[1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3]])
    del explorer
def test_cppn_evolution():
    """GoalSpaceDensityExplorer with a CPPN-evolved `init_state` parameter.

    Configures the Lenia initial state to be produced by NEAT/CPPN evolution
    (1 generation at init, 2 per mutation) and checks that 5 exploration
    runs are recorded.
    """
    # use lenia as test system
    system = ad.systems.Lenia()
    config = GoalSpaceDensityExplorer.default_config()
    config.seed = 1
    config.num_of_random_initialization = 2
    parameter = ad.Config()
    parameter.name = 'init_state'
    parameter.type = 'cppn_evolution'
    parameter.init = ad.cppn.TwoDMatrixCCPNNEATEvolution.default_config()
    # the NEAT configuration file lives next to this test module
    parameter.init.neat_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test_neat_single.cfg')
    parameter.init.n_generations = 1
    parameter.init.best_genome_of_last_generation = True
    parameter.mutate = ad.cppn.TwoDMatrixCCPNNEATEvolution.default_config()
    parameter.mutate.neat_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test_neat_single.cfg')
    parameter.mutate.n_generations = 2
    parameter.mutate.best_genome_of_last_generation = True
    config.run_parameters.append(parameter)
    # Parameter 2: R
    parameter = ad.Config()
    parameter.name = 'R'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 2, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 2, 'max': 20}
    config.run_parameters.append(parameter)
    # Parameter 3: T
    parameter = ad.Config()
    parameter.name = 'T'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 1, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 1, 'max': 20}
    config.run_parameters.append(parameter)
    # which statistics are used as a goal space
    statistic_representation_config = ad.representations.static.StatisticRepresentation.default_config()
    statistic_representation_config.statistics = ['activation_mass_mean',
                                                  'activation_mass_std']
    config.goal_space_representation = ad.representations.static.StatisticRepresentation(config=statistic_representation_config)
    # how are goals sampled
    config.source_parameter_selection.type = 'random'
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
# def test_specific_goal_selection():
# # use lenia as test system
#
# system = ad.systems.Lenia()
#
# config = ad.explorers.GoalSpaceExplorer.default_config()
# config.seed = 1
# config.num_of_random_initialization = 2
#
# # Parameter 2: R
# parameter = ad.Config()
# parameter.name = 'R'
# parameter.type = 'sampling'
# parameter.init = ('discrete', 2, 20)
# parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 2, 'max': 20}
# config.run_parameters.append(parameter)
#
# # Parameter 3: T
# parameter = ad.Config()
# parameter.name = 'T'
# parameter.type = 'sampling'
# parameter.init = ('discrete', 1, 20)
# parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 1, 'max': 20}
# config.run_parameters.append(parameter)
#
# # which statistics are used as a goal space
# statistic_representation_config = ad.representations.static.StatisticRepresentation.default_config()
# statistic_representation_config.statistics = ['activation_mass_mean',
# 'activation_mass_std']
# config.goal_space_representation = ad.representations.static.StatisticRepresentation(config=statistic_representation_config)
#
# # how are goals sampled
# config.goal_selection.type = 'specific'
# config.goal_selection.goals = [[1, 2],
# [3, 4]]
#
# explorer = ad.explorers.GoalSpaceExplorer(system=system, config=config)
# explorer.run(5, verbose=False)
#
# assert len(explorer.data) == 5
# assert np.shape(explorer.statistics.reached_initial_goals) == (2, 2) # no statistics should have been computed
# assert np.shape(explorer.statistics.target_goals) == (3, 2) # no statistics should have been computed
# assert np.shape(explorer.statistics.reached_goals) == (3, 2) # no statistics should have been computed
#
# # check if only defined target goals are used
# for trg_goal in explorer.statistics.target_goals:
# assert np.any([ np.all(trg_goal == x) for x in np.array([[1, 2], [3, 4]]) ])
def test_constraint_source_policy_selection():
    """Constraint-based selection of source parameters ('optimal' mode).

    After two random initializations, checks that runs 2-4 pick their
    source parameter according to the configured constraint filters:
    a simple filter, boolean `active` flags, and data-dependent `active`
    conditions.
    """
    # use lenia as test system
    system = ad.systems.Lenia()
    config = GoalSpaceDensityExplorer.default_config()
    config.seed = 1
    config.num_of_random_initialization = 2
    # Parameter 2: R
    parameter = ad.Config()
    parameter.name = 'R'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 2, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 2, 'max': 20}
    config.run_parameters.append(parameter)
    # Parameter 3: T
    parameter = ad.Config()
    parameter.name = 'T'
    parameter.type = 'sampling'
    parameter.init = ('discrete', 1, 20)
    parameter.mutate = {'type': 'discrete', 'distribution': 'gauss', 'sigma': 0.5, 'min': 1, 'max': 20}
    config.run_parameters.append(parameter)
    # which statistics are used as a goal space
    statistic_representation_config = ad.representations.static.StatisticRepresentation.default_config()
    statistic_representation_config.statistics = ['activation_mass_mean',
                                                  'activation_mass_std']
    config.goal_space_representation = ad.representations.static.StatisticRepresentation(config=statistic_representation_config)
    # how are goals sampled
    config.source_parameter_selection.type = 'optimal'
    #########################################################################################
    # simple constraint
    # how source policies are selected
    config.source_parameter_selection.constraints = [('id', '==', 1)]
    config.source_parameter_selection.goal_space_constraints = [(0, 1), (0, 40)]
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    # the id==1 filter forces every non-random run to start from parameter 1
    assert explorer.data[2].source_parameter_idx == 1
    assert explorer.data[3].source_parameter_idx == 1
    assert explorer.data[4].source_parameter_idx == 1
    #########################################################################################
    # active settings TRUE / FALSE
    # how source policies are selected
    # the first (id==1) constraint is switched off, so only id==0 applies
    config.source_parameter_selection.constraints = [dict(active = False,
                                                          filter = ('id', '==', 1)),
                                                     dict(active=True,
                                                          filter=('id', '==', 0))
                                                     ]
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    assert explorer.data[2].source_parameter_idx == 0
    assert explorer.data[3].source_parameter_idx == 0
    assert explorer.data[4].source_parameter_idx == 0
    #########################################################################################
    # active settings as constraint
    # how source policies are selected
    # constraints switch once the max id in the data reaches 3: runs 2 and 3
    # fall under the first filter (id==1), run 4 under the second (id==0)
    config.source_parameter_selection.constraints = [dict(active = (('max', 'id'), '<', 3),
                                                          filter = ('id', '==', 1)),
                                                     dict(active = (('max', 'id'), '>=', 3),
                                                          filter=('id', '==', 0))
                                                     ]
    explorer = GoalSpaceDensityExplorer(system=system, config=config)
    explorer.run(5, verbose=False)
    assert len(explorer.data) == 5
    assert explorer.data[2].source_parameter_idx == 1
    assert explorer.data[3].source_parameter_idx == 1
    assert explorer.data[4].source_parameter_idx == 0
"chris.reinke@inria.fr"
] | chris.reinke@inria.fr |
21816f17050910e9e18ccd56d0411ceab4b65bd8 | a8139ccd50a27861d3c5a4168fd0e4b351c0a514 | /material/code/advanced_oop_and_python_topics/4_ManagedAttributeDemo/ManagedAttributeDemo2.py | 79ffd4b1f17b51ffde72f399ac413b1c616988d1 | [] | no_license | shambhand/pythontraining | a124aa1485c3ce0e589fc2cd93c1e991746432e4 | 24dd923e2b2c07c70500775e3665e2a527240329 | refs/heads/master | 2021-05-17T22:54:45.331127 | 2019-01-11T03:12:59 | 2019-01-11T03:12:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | # Attribute properties are inherited by the
# derived class
import sys
class Person:
def __init__(self, name):
self._name = name
def getName(self):
print('fetch...')
return self._name
def setName(self, value):
print('change...')
self._name = value
def delName(self):
print('remove...')
del self._name
name = property(getName, setName, delName, "name property docs")
class Employee (Person):
pass
def main ():
bob = Employee('Bob Smith')
print(bob.name)
bob.name = 'Robert Smith'
print(bob.name)
del bob.name
print('-'*20)
sue = Employee('Sue Jones')
print(sue.name)
print(Person.name.__doc__)
sys.exit (0)
main ()
| [
"amit2766@gmail.com"
] | amit2766@gmail.com |
4ffaf02529c48182b78088a71725d0fd48b1e682 | b945118b1344f05921af1f9974e5d6e370967253 | /cloudflu/r0.5/cloudflu/amazon/apps/reservation_rm.py | e97806bf40cf663f3ed7a01019d84af190e32399 | [
"Apache-2.0"
] | permissive | asimurzin/balloon-testing | aab85316d2a63c3a497b5afe46467c78c17a2691 | 35d72685a319fa66ee7006841b75c54bd62434e0 | refs/heads/master | 2016-09-06T20:16:59.157112 | 2011-07-26T11:51:49 | 2011-07-26T11:51:49 | 989,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,803 | py | #!/usr/bin/env python
#--------------------------------------------------------------------------------------
## Copyright 2010 Alexey Petrov
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
## See http://sourceforge.net/apps/mediawiki/cloudflu
##
## Author : Alexey Petrov
##
#--------------------------------------------------------------------------------------
"""
Deletes the appointed Amazon EC2 reservation and release all its incorporated resources
"""
#--------------------------------------------------------------------------------------
import cloudflu.common as common
from cloudflu.common import print_d, print_e, sh_command
from cloudflu.common import Timer, WorkerPool
from cloudflu import amazon
from cloudflu.amazon import ec2
#--------------------------------------------------------------------------------------
def execute( the_ec2_conn, the_cluster_id ) :
try:
a_reservation = ec2.use.get_reservation( the_ec2_conn, the_cluster_id )
print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
except Exception, exc:
print_e( '%s\n' % exc, False )
return
a_security_group = None
try:
a_security_group = ec2.use.get_security_group( the_ec2_conn, a_reservation )
print_d( "< %r > : %s\n" % ( a_security_group, a_security_group.rules ) )
except:
from cloudflu.common import print_traceback
print_traceback()
pass
an_instance = a_reservation.instances[ 0 ]
an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
for an_instance in a_reservation.instances:
an_instance.terminate()
pass
try:
the_ec2_conn.delete_key_pair( an_instance.key_name )
import os; os.remove( an_identity_file )
except:
from cloudflu.common import print_traceback
print_traceback()
pass
try:
the_ec2_conn.delete_security_group( a_security_group.name )
except:
from cloudflu.common import print_traceback
print_traceback()
pass
print_d( '%s ' % an_instance.update() )
while an_instance.update() != 'terminated' :
print_d( '.' )
continue
print_d( ' %s\n' % an_instance.update() )
pass
#--------------------------------------------------------------------------------------
def main() :
    """Command-line entry point: parse options, then delete the appointed
    reservations concurrently (one worker per cluster id) via execute()."""
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    from reservation_rm_options import usage_description as usage_description_options
    an_usage_description += usage_description_options()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    from reservation_rm_options import extract as extract_options
    a_cluster_ids = extract_options( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # echo an equivalent command line so the run can be reproduced verbatim
    import sys; an_engine = sys.argv[ 0 ]
    from reservation_rm_options import compose as compose_options
    a_call = "%s %s" % ( an_engine, compose_options( a_cluster_ids ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    # one worker per reservation so the deletions proceed in parallel
    a_worker_pool = WorkerPool( len( a_cluster_ids ) )
    for a_cluster_id in a_cluster_ids :
        a_worker_pool.charge( execute, ( an_ec2_conn, a_cluster_id ) )
        pass
    a_worker_pool.shutdown()
    a_worker_pool.join()
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    # There are no - it is a terminal step
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
#------------------------------------------------------------------------------------------
if __name__ == '__main__' :
main()
pass
#------------------------------------------------------------------------------------------
| [
"asimurzin@gmail.com"
] | asimurzin@gmail.com |
1c3dc4c7a6e7fa587df8ac58227acadc80637fca | 14d8418ca5990217be67aee89fdaa310db03fbba | /models/website.py | 860b81e8317c7d462118ecfceccd0e54019dd405 | [
"Apache-2.0"
] | permissive | sachanta/lm-sdk-python | 3a16457bd2d5b880a0d238a88a9d1d5b8d9675f0 | e476d415c7279457f79b5d032a73d950af2fe96b | refs/heads/master | 2023-08-03T08:39:42.842790 | 2021-09-13T07:20:56 | 2021-09-13T07:20:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,624 | py | # coding: utf-8
"""
LogicMonitor REST API
LogicMonitor is a SaaS-based performance monitoring platform that provides full visibility into complex, hybrid infrastructures, offering granular performance monitoring and actionable data and insights. logicmonitor_sdk enables you to manage your LogicMonitor account programmatically. Note: For Python SDKs, the REQUEST parameters can contain camelCase or an underscore. However, the RESPONSE parameters will always contain an underscore. For example, the REQUEST parameter can be testLocation or test_location. The RESPONSE parameter will be test_location. # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.name_and_value import NameAndValue # noqa: F401,E501
from logicmonitor_sdk.models.website_check_point import WebsiteCheckPoint # noqa: F401,E501
from logicmonitor_sdk.models.website_collector_info import WebsiteCollectorInfo # noqa: F401,E501
from logicmonitor_sdk.models.website_location import WebsiteLocation # noqa: F401,E501
class Website(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'template': 'object',
'test_location': 'WebsiteLocation',
'group_id': 'int',
'overall_alert_level': 'str',
'polling_interval': 'int',
'description': 'str',
'disable_alerting': 'bool',
'type': 'str',
'last_updated': 'int',
'stop_monitoring_by_folder': 'bool',
'id': 'int',
'stop_monitoring': 'bool',
'user_permission': 'str',
'individual_sm_alert_enable': 'bool',
'checkpoints': 'list[WebsiteCheckPoint]',
'transition': 'int',
'global_sm_alert_cond': 'int',
'is_internal': 'bool',
'collectors': 'list[WebsiteCollectorInfo]',
'name': 'str',
'use_default_location_setting': 'bool',
'use_default_alert_setting': 'bool',
'individual_alert_level': 'str',
'properties': 'list[NameAndValue]',
'status': 'str'
}
attribute_map = {
'template': 'template',
'test_location': 'testLocation',
'group_id': 'groupId',
'overall_alert_level': 'overallAlertLevel',
'polling_interval': 'pollingInterval',
'description': 'description',
'disable_alerting': 'disableAlerting',
'type': 'type',
'last_updated': 'lastUpdated',
'stop_monitoring_by_folder': 'stopMonitoringByFolder',
'id': 'id',
'stop_monitoring': 'stopMonitoring',
'user_permission': 'userPermission',
'individual_sm_alert_enable': 'individualSmAlertEnable',
'checkpoints': 'checkpoints',
'transition': 'transition',
'global_sm_alert_cond': 'globalSmAlertCond',
'is_internal': 'isInternal',
'collectors': 'collectors',
'name': 'name',
'use_default_location_setting': 'useDefaultLocationSetting',
'use_default_alert_setting': 'useDefaultAlertSetting',
'individual_alert_level': 'individualAlertLevel',
'properties': 'properties',
'status': 'status'
}
discriminator_value_class_map = {
'webcheck': 'WebCheck',
'pingcheck': 'PingCheck'
}
    def __init__(self, template=None, test_location=None, group_id=None, overall_alert_level=None, polling_interval=None, description=None, disable_alerting=None, type=None, last_updated=None, stop_monitoring_by_folder=None, id=None, stop_monitoring=None, user_permission=None, individual_sm_alert_enable=None, checkpoints=None, transition=None, global_sm_alert_cond=None, is_internal=None, collectors=None, name=None, use_default_location_setting=None, use_default_alert_setting=None, individual_alert_level=None, properties=None, status=None):  # noqa: E501
        """Website - a model defined in Swagger""" # noqa: E501
        # Backing fields for every attribute declared in swagger_types.
        self._template = None
        self._test_location = None
        self._group_id = None
        self._overall_alert_level = None
        self._polling_interval = None
        self._description = None
        self._disable_alerting = None
        self._type = None
        self._last_updated = None
        self._stop_monitoring_by_folder = None
        self._id = None
        self._stop_monitoring = None
        self._user_permission = None
        self._individual_sm_alert_enable = None
        self._checkpoints = None
        self._transition = None
        self._global_sm_alert_cond = None
        self._is_internal = None
        self._collectors = None
        self._name = None
        self._use_default_location_setting = None
        self._use_default_alert_setting = None
        self._individual_alert_level = None
        self._properties = None
        self._status = None
        # 'type' selects the concrete subclass (see discriminator_value_class_map).
        self.discriminator = 'type'
        if template is not None:
            self.template = template
        # test_location, type and name are assigned unconditionally: their
        # property setters reject None, making them effectively required.
        self.test_location = test_location
        if group_id is not None:
            self.group_id = group_id
        if overall_alert_level is not None:
            self.overall_alert_level = overall_alert_level
        if polling_interval is not None:
            self.polling_interval = polling_interval
        if description is not None:
            self.description = description
        if disable_alerting is not None:
            self.disable_alerting = disable_alerting
        self.type = type
        if last_updated is not None:
            self.last_updated = last_updated
        if stop_monitoring_by_folder is not None:
            self.stop_monitoring_by_folder = stop_monitoring_by_folder
        if id is not None:
            self.id = id
        if stop_monitoring is not None:
            self.stop_monitoring = stop_monitoring
        if user_permission is not None:
            self.user_permission = user_permission
        if individual_sm_alert_enable is not None:
            self.individual_sm_alert_enable = individual_sm_alert_enable
        if checkpoints is not None:
            self.checkpoints = checkpoints
        if transition is not None:
            self.transition = transition
        if global_sm_alert_cond is not None:
            self.global_sm_alert_cond = global_sm_alert_cond
        if is_internal is not None:
            self.is_internal = is_internal
        if collectors is not None:
            self.collectors = collectors
        self.name = name
        if use_default_location_setting is not None:
            self.use_default_location_setting = use_default_location_setting
        if use_default_alert_setting is not None:
            self.use_default_alert_setting = use_default_alert_setting
        if individual_alert_level is not None:
            self.individual_alert_level = individual_alert_level
        if properties is not None:
            self.properties = properties
        if status is not None:
            self.status = status
@property
def template(self):
"""Gets the template of this Website. # noqa: E501
The website template # noqa: E501
:return: The template of this Website. # noqa: E501
:rtype: object
"""
return self._template
@template.setter
def template(self, template):
"""Sets the template of this Website.
The website template # noqa: E501
:param template: The template of this Website. # noqa: E501
:type: object
"""
self._template = template
@property
def test_location(self):
"""Gets the test_location of this Website. # noqa: E501
The locations from which the website is monitored. If the website is internal, this field should include Collectors. If Non-Internal, possible test locations are: 1 : US - LA 2 : US - DC 3 : US - SF 4 : Europe - Dublin 5 : Asia - Singapore 6 : Australia - Sydney testLocation:\"{all:true}\" indicates that the service will be monitored from all checkpoint locations testLocation:\"{smgIds:[1,2,3]}\" indicates that the service will be monitored from checkpoint locations 1, 2 and 3 testLocation:\"{collectorIds:[85,90]}\" indicates that the service will be monitored by Collectors 85 and 90 # noqa: E501
:return: The test_location of this Website. # noqa: E501
:rtype: WebsiteLocation
"""
return self._test_location
@test_location.setter
def test_location(self, test_location):
"""Sets the test_location of this Website.
The locations from which the website is monitored. If the website is internal, this field should include Collectors. If Non-Internal, possible test locations are: 1 : US - LA 2 : US - DC 3 : US - SF 4 : Europe - Dublin 5 : Asia - Singapore 6 : Australia - Sydney testLocation:\"{all:true}\" indicates that the service will be monitored from all checkpoint locations testLocation:\"{smgIds:[1,2,3]}\" indicates that the service will be monitored from checkpoint locations 1, 2 and 3 testLocation:\"{collectorIds:[85,90]}\" indicates that the service will be monitored by Collectors 85 and 90 # noqa: E501
:param test_location: The test_location of this Website. # noqa: E501
:type: WebsiteLocation
"""
if test_location is None:
raise ValueError("Invalid value for `test_location`, must not be `None`") # noqa: E501
self._test_location = test_location
    @property
    def group_id(self):
        """Gets the group_id of this Website.

        The id of the group the website is in.

        :return: The group_id of this Website.
        :rtype: int
        """
        return self._group_id
    @group_id.setter
    def group_id(self, group_id):
        """Sets the group_id of this Website.

        The id of the group the website is in.

        :param group_id: The group_id of this Website.
        :type: int
        """
        self._group_id = group_id
    @property
    def overall_alert_level(self):
        """Gets the overall_alert_level of this Website.

        warn | error | critical. The level of alert to trigger if the website
        fails the number of checks specified by ``transition`` from the test
        locations specified by ``globalSmAlertCond``.

        :return: The overall_alert_level of this Website.
        :rtype: str
        """
        return self._overall_alert_level
    @overall_alert_level.setter
    def overall_alert_level(self, overall_alert_level):
        """Sets the overall_alert_level of this Website.

        warn | error | critical. The level of alert to trigger if the website
        fails the number of checks specified by ``transition`` from the test
        locations specified by ``globalSmAlertCond``.

        :param overall_alert_level: The overall_alert_level of this Website.
        :type: str
        """
        self._overall_alert_level = overall_alert_level
    @property
    def polling_interval(self):
        """Gets the polling_interval of this Website.

        The polling interval for the website, in units of minutes (1-10).
        This value indicates how often the website is checked.

        :return: The polling_interval of this Website.
        :rtype: int
        """
        return self._polling_interval
    @polling_interval.setter
    def polling_interval(self, polling_interval):
        """Sets the polling_interval of this Website.

        The polling interval for the website, in units of minutes (1-10).
        This value indicates how often the website is checked.

        :param polling_interval: The polling_interval of this Website.
        :type: int
        """
        self._polling_interval = polling_interval
    @property
    def description(self):
        """Gets the description of this Website.

        The description of the website.

        :return: The description of this Website.
        :rtype: str
        """
        return self._description
    @description.setter
    def description(self, description):
        """Sets the description of this Website.

        The description of the website.

        :param description: The description of this Website.
        :type: str
        """
        self._description = description
    @property
    def disable_alerting(self):
        """Gets the disable_alerting of this Website.

        true: alerting is disabled for the website.
        false: alerting is enabled for the website.
        If stopMonitoring=true, then alerting will also by default be
        disabled for the website.

        :return: The disable_alerting of this Website.
        :rtype: bool
        """
        return self._disable_alerting
    @disable_alerting.setter
    def disable_alerting(self, disable_alerting):
        """Sets the disable_alerting of this Website.

        true: alerting is disabled for the website.
        false: alerting is enabled for the website.
        If stopMonitoring=true, then alerting will also by default be
        disabled for the website.

        :param disable_alerting: The disable_alerting of this Website.
        :type: bool
        """
        self._disable_alerting = disable_alerting
    @property
    def type(self):
        """Gets the type of this Website.

        The type of the website. Acceptable values are: pingcheck, webcheck.

        :return: The type of this Website.
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """Sets the type of this Website.

        The type of the website. Acceptable values are: pingcheck, webcheck.
        Required field - must not be None.

        :param type: The type of this Website.
        :type: str
        """
        if type is None:
            raise ValueError("Invalid value for `type`, must not be `None`")  # noqa: E501
        self._type = type
    @property
    def last_updated(self):
        """Gets the last_updated of this Website.

        The time (in epoch format) that the website was updated.

        :return: The last_updated of this Website.
        :rtype: int
        """
        return self._last_updated
    @last_updated.setter
    def last_updated(self, last_updated):
        """Sets the last_updated of this Website.

        The time (in epoch format) that the website was updated.

        :param last_updated: The last_updated of this Website.
        :type: int
        """
        self._last_updated = last_updated
    @property
    def stop_monitoring_by_folder(self):
        """Gets the stop_monitoring_by_folder of this Website.

        true: monitoring is disabled for all services in the website's folder.
        false: monitoring is not disabled for all services in the website's
        folder.

        :return: The stop_monitoring_by_folder of this Website.
        :rtype: bool
        """
        return self._stop_monitoring_by_folder
    @stop_monitoring_by_folder.setter
    def stop_monitoring_by_folder(self, stop_monitoring_by_folder):
        """Sets the stop_monitoring_by_folder of this Website.

        true: monitoring is disabled for all services in the website's folder.
        false: monitoring is not disabled for all services in the website's
        folder.

        :param stop_monitoring_by_folder: The stop_monitoring_by_folder of this Website.
        :type: bool
        """
        self._stop_monitoring_by_folder = stop_monitoring_by_folder
    @property
    def id(self):
        """Gets the id of this Website.

        The id of the website.

        :return: The id of this Website.
        :rtype: int
        """
        return self._id
    @id.setter
    def id(self, id):
        """Sets the id of this Website.

        The id of the website.

        :param id: The id of this Website.
        :type: int
        """
        self._id = id
    @property
    def stop_monitoring(self):
        """Gets the stop_monitoring of this Website.

        true: monitoring is disabled for the website.
        false: monitoring is enabled for the website.
        If stopMonitoring=true, then alerting will also by default be
        disabled for the website.

        :return: The stop_monitoring of this Website.
        :rtype: bool
        """
        return self._stop_monitoring
    @stop_monitoring.setter
    def stop_monitoring(self, stop_monitoring):
        """Sets the stop_monitoring of this Website.

        true: monitoring is disabled for the website.
        false: monitoring is enabled for the website.
        If stopMonitoring=true, then alerting will also by default be
        disabled for the website.

        :param stop_monitoring: The stop_monitoring of this Website.
        :type: bool
        """
        self._stop_monitoring = stop_monitoring
    @property
    def user_permission(self):
        """Gets the user_permission of this Website.

        write | read | ack. The permission level of the user that made the
        API request.

        :return: The user_permission of this Website.
        :rtype: str
        """
        return self._user_permission
    @user_permission.setter
    def user_permission(self, user_permission):
        """Sets the user_permission of this Website.

        write | read | ack. The permission level of the user that made the
        API request.

        :param user_permission: The user_permission of this Website.
        :type: str
        """
        self._user_permission = user_permission
    @property
    def individual_sm_alert_enable(self):
        """Gets the individual_sm_alert_enable of this Website.

        true: an alert will be triggered if a check fails from an individual
        test location.
        false: an alert will not be triggered if a check fails from an
        individual test location.

        :return: The individual_sm_alert_enable of this Website.
        :rtype: bool
        """
        return self._individual_sm_alert_enable
    @individual_sm_alert_enable.setter
    def individual_sm_alert_enable(self, individual_sm_alert_enable):
        """Sets the individual_sm_alert_enable of this Website.

        true: an alert will be triggered if a check fails from an individual
        test location.
        false: an alert will not be triggered if a check fails from an
        individual test location.

        :param individual_sm_alert_enable: The individual_sm_alert_enable of this Website.
        :type: bool
        """
        self._individual_sm_alert_enable = individual_sm_alert_enable
    @property
    def checkpoints(self):
        """Gets the checkpoints of this Website.

        The checkpoints from which the website is monitored. This object
        should reference each location specified in testLocation in addition
        to an 'Overall' checkpoint.

        :return: The checkpoints of this Website.
        :rtype: list[WebsiteCheckPoint]
        """
        return self._checkpoints
    @checkpoints.setter
    def checkpoints(self, checkpoints):
        """Sets the checkpoints of this Website.

        The checkpoints from which the website is monitored. This object
        should reference each location specified in testLocation in addition
        to an 'Overall' checkpoint.

        :param checkpoints: The checkpoints of this Website.
        :type: list[WebsiteCheckPoint]
        """
        self._checkpoints = checkpoints
    @property
    def transition(self):
        """Gets the transition of this Website.

        1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 30 | 60. The number of
        checks that must fail before an alert is triggered.

        :return: The transition of this Website.
        :rtype: int
        """
        return self._transition
    @transition.setter
    def transition(self, transition):
        """Sets the transition of this Website.

        1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 30 | 60. The number of
        checks that must fail before an alert is triggered.

        :param transition: The transition of this Website.
        :type: int
        """
        self._transition = transition
    @property
    def global_sm_alert_cond(self):
        """Gets the global_sm_alert_cond of this Website.

        The number of test locations that checks must fail at to trigger an
        alert; the alert triggered will be consistent with the value of
        overallAlertLevel. Possible values and the corresponding number of
        Site Monitor locations: 0 : all, 1 : half, 2 : more than one, 3 : any.

        :return: The global_sm_alert_cond of this Website.
        :rtype: int
        """
        return self._global_sm_alert_cond
    @global_sm_alert_cond.setter
    def global_sm_alert_cond(self, global_sm_alert_cond):
        """Sets the global_sm_alert_cond of this Website.

        The number of test locations that checks must fail at to trigger an
        alert; the alert triggered will be consistent with the value of
        overallAlertLevel. Possible values and the corresponding number of
        Site Monitor locations: 0 : all, 1 : half, 2 : more than one, 3 : any.

        :param global_sm_alert_cond: The global_sm_alert_cond of this Website.
        :type: int
        """
        self._global_sm_alert_cond = global_sm_alert_cond
    @property
    def is_internal(self):
        """Gets the is_internal of this Website.

        Whether or not the website is internal.

        :return: The is_internal of this Website.
        :rtype: bool
        """
        return self._is_internal
    @is_internal.setter
    def is_internal(self, is_internal):
        """Sets the is_internal of this Website.

        Whether or not the website is internal.

        :param is_internal: The is_internal of this Website.
        :type: bool
        """
        self._is_internal = is_internal
    @property
    def collectors(self):
        """Gets the collectors of this Website.

        The collectors that are monitoring the website, if the website is
        internal.

        :return: The collectors of this Website.
        :rtype: list[WebsiteCollectorInfo]
        """
        return self._collectors
    @collectors.setter
    def collectors(self, collectors):
        """Sets the collectors of this Website.

        The collectors that are monitoring the website, if the website is
        internal.

        :param collectors: The collectors of this Website.
        :type: list[WebsiteCollectorInfo]
        """
        self._collectors = collectors
    @property
    def name(self):
        """Gets the name of this Website.

        The name of the website.

        :return: The name of this Website.
        :rtype: str
        """
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this Website.

        The name of the website. Required field - must not be None.

        :param name: The name of this Website.
        :type: str
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501
        self._name = name
    @property
    def use_default_location_setting(self):
        """Gets the use_default_location_setting of this Website.

        true: the checkpoint locations configured in the website Default
        Settings will be used.
        false: the checkpoint locations specified in testLocation will be
        used.

        :return: The use_default_location_setting of this Website.
        :rtype: bool
        """
        return self._use_default_location_setting
    @use_default_location_setting.setter
    def use_default_location_setting(self, use_default_location_setting):
        """Sets the use_default_location_setting of this Website.

        true: the checkpoint locations configured in the website Default
        Settings will be used.
        false: the checkpoint locations specified in testLocation will be
        used.

        :param use_default_location_setting: The use_default_location_setting of this Website.
        :type: bool
        """
        self._use_default_location_setting = use_default_location_setting
    @property
    def use_default_alert_setting(self):
        """Gets the use_default_alert_setting of this Website.

        true: the alert settings configured in the website Default Settings
        will be used.
        false: Service Default Settings will not be used, and you will need
        to specify individualSMAlertEnable, individualAlertLevel,
        globalSmAlertConf, overallAlertLevel and pollingInterval.

        :return: The use_default_alert_setting of this Website.
        :rtype: bool
        """
        return self._use_default_alert_setting
    @use_default_alert_setting.setter
    def use_default_alert_setting(self, use_default_alert_setting):
        """Sets the use_default_alert_setting of this Website.

        true: the alert settings configured in the website Default Settings
        will be used.
        false: Service Default Settings will not be used, and you will need
        to specify individualSMAlertEnable, individualAlertLevel,
        globalSmAlertConf, overallAlertLevel and pollingInterval.

        :param use_default_alert_setting: The use_default_alert_setting of this Website.
        :type: bool
        """
        self._use_default_alert_setting = use_default_alert_setting
    @property
    def individual_alert_level(self):
        """Gets the individual_alert_level of this Website.

        warn | error | critical. The level of alert to trigger if the website
        fails a check from an individual test location.

        :return: The individual_alert_level of this Website.
        :rtype: str
        """
        return self._individual_alert_level
    @individual_alert_level.setter
    def individual_alert_level(self, individual_alert_level):
        """Sets the individual_alert_level of this Website.

        warn | error | critical. The level of alert to trigger if the website
        fails a check from an individual test location.

        :param individual_alert_level: The individual_alert_level of this Website.
        :type: str
        """
        self._individual_alert_level = individual_alert_level
    @property
    def properties(self):
        """Gets the properties of this Website.

        The properties associated with the website.

        :return: The properties of this Website.
        :rtype: list[NameAndValue]
        """
        return self._properties
    @properties.setter
    def properties(self, properties):
        """Sets the properties of this Website.

        The properties associated with the website.

        :param properties: The properties of this Website.
        :type: list[NameAndValue]
        """
        self._properties = properties
    @property
    def status(self):
        """Gets the status of this Website.

        Whether the website is dead (the collector is down) or not.

        :return: The status of this Website.
        :rtype: str
        """
        return self._status
    @status.setter
    def status(self, status):
        """Sets the status of this Website.

        Whether the website is dead (the collector is down) or not.

        :param status: The status of this Website.
        :type: str
        """
        self._status = status
    def get_real_child_model(self, data):
        """Returns the real base class specified by the discriminator.

        Looks up ``data[self.discriminator]`` (lower-cased) in the class map;
        returns None when the discriminator value is unknown.
        """
        discriminator_value = data[self.discriminator].lower()
        return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Website, dict):
for key, value in self.items():
result[key] = value
return result
    def to_str(self):
        """Returns the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint` - delegates to :meth:`to_str`."""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Website):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal (negation of __eq__)."""
        return not self == other
| [
"bamboo@build01.us-west-1.logicmonitor.net"
] | bamboo@build01.us-west-1.logicmonitor.net |
2e3607e4c88c2ff608609c6e92f1369a1a64eb09 | 77311ad9622a7d8b88707d7cee3f44de7c8860cb | /res/scripts/client/gui/shared/gui_items/dossier/factories.py | 0a30a9ed5c74503078b8853569b8c45fbb26c42e | [] | no_license | webiumsk/WOT-0.9.14-CT | 9b193191505a4560df4e872e022eebf59308057e | cfe0b03e511d02c36ce185f308eb48f13ecc05ca | refs/heads/master | 2021-01-10T02:14:10.830715 | 2016-02-14T11:59:59 | 2016-02-14T11:59:59 | 51,606,676 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 12,884 | py | # 2016.02.14 12:41:35 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/shared/gui_items/dossier/factories.py
from collections import Counter

import nations
from dossiers2.ui.achievements import ACHIEVEMENT_TYPE, getType as getAchieveType, ACHIEVEMENT_BLOCK as _AB, WHITE_TIGER_RECORD, RARE_STORAGE_RECORD
from gui.shared.gui_items.dossier import achievements as _as
from gui.shared.gui_items.dossier.achievements import abstract as _abstract_achievements
class _AchieveFactory(object):
def __init__(self, achieveClass, name, block, dossier):
self._achieveClass = achieveClass
self._name = name
self._block = block
self._dossier = dossier
def getName(self):
return self._name
def getBlock(self):
return self._block
def getDossier(self):
return self._dossier
def getAchieveClass(self):
return self._achieveClass
def isInDossier(self):
return self._achieveClass.checkIsInDossier(self._block, self._name, self._dossier)
def isValid(self):
return self._achieveClass.checkIsValid(self._block, self._name, self._dossier)
def create(self, value = None):
return self._achieveClass(self._name, self._block, self._dossier, value)
@classmethod
def get(cls, achieveClass):
return lambda name, block, dossier: cls(achieveClass, name, block, dossier)
class _CustomAchieveFactory(_AchieveFactory):
    # Factory for achievements whose constructor signature is (dossier, value)
    # rather than the base (name, block, dossier, value).
    def create(self, value = None):
        return self._achieveClass(self._dossier, value)
    @classmethod
    def get(cls, achieveClass):
        # NOTE(review): byte-identical to the inherited implementation; `cls`
        # already binds to this subclass. Kept as-is (decompiled output).
        return lambda name, block, dossier: cls(achieveClass, name, block, dossier)
class _BlockAchieveFactory(_AchieveFactory):
    # Factory for achievements constructed without the block argument:
    # the achievement class signature is (name, dossier, value).
    def __init__(self, achieveClass, name, block, dossier):
        # NOTE(review): adds nothing over the inherited constructor.
        super(_BlockAchieveFactory, self).__init__(achieveClass, name, block, dossier)
    def create(self, value = None):
        return self._achieveClass(self._name, self._dossier, value)
    @classmethod
    def get(cls, achieveClass):
        # Identical to the base implementation; kept as-is (decompiled output).
        return lambda name, block, dossier: cls(achieveClass, name, block, dossier)
class _SequenceAchieveFactory(_AchieveFactory):
    """Factory for dossier blocks storing a flat sequence of achievement IDs.

    ``create`` returns a dict mapping each distinct achievement ID found in
    the block to an achievement instance whose value is the number of times
    that ID occurs in the sequence.
    """

    def create(self, value = None):
        achieves = self._dossier.getBlock(self._block)
        # Counter builds the same {achieveID: occurrences} mapping in a single
        # pass that the old set()/list.count() loop built in O(n * m).
        counts = Counter(achieves)
        result = {}
        for achieveID, count in counts.iteritems():
            factory = getAchievementFactory((self._block, achieveID), self._dossier)
            if factory is not None:
                achieve = factory.create(value=count)
                if achieve is not None:
                    result[achieveID] = achieve
        return result

    @classmethod
    def get(cls, defaultClass):
        # Maker callable matching the other factories' (name, block, dossier) shape.
        return lambda name, block, dossier: cls(defaultClass, name, block, dossier)

    def isInDossier(self):
        # A sequence block is always considered present in the dossier.
        return True
class _RareAchievesFactory(_SequenceAchieveFactory):
    # Sequence factory specialised for the rare-achievements storage block.
    def isValid(self):
        # Rare achievement data is unavailable while the account is roaming.
        return not self._dossier.isInRoaming()
    @classmethod
    def get(cls):
        # The maker takes the rare achievement ID in place of a record name.
        return lambda rareID, block, dossier: cls(_abstract_achievements.RareAchievement, rareID, block, dossier)
class _NationAchieveFactory(_AchieveFactory):
    # Factory for nation-specific achievements: carries the nation index and
    # passes it (instead of the record name) to the achievement constructor.
    def __init__(self, achieveClass, name, nationID, block, dossier):
        super(_NationAchieveFactory, self).__init__(achieveClass, name, block, dossier)
        self._nationID = nationID
    def getNationID(self):
        return self._nationID
    def create(self, value = None):
        return self._achieveClass(self._nationID, self._block, self._dossier, value)
    @classmethod
    def get(cls, achieveClass, nationID = -1):
        # Default nationID of -1 - presumably 'no specific nation';
        # TODO(review): confirm against the achievement classes.
        return lambda name, block, dossier: cls(achieveClass, name, nationID, block, dossier)
# Fallback factories keyed by the whole dossier block.
_ACHIEVEMENTS_BY_BLOCK = {_AB.RARE: _BlockAchieveFactory.get(_abstract_achievements.RareAchievement)}
# Fallback factories keyed by the achievement's declared type.
_ACHIEVEMENTS_BY_TYPE = {ACHIEVEMENT_TYPE.CLASS: _AchieveFactory.get(_abstract_achievements.ClassProgressAchievement),
 ACHIEVEMENT_TYPE.SERIES: _AchieveFactory.get(_abstract_achievements.SeriesAchievement)}
# Exact (block, record-name) -> factory-maker overrides; consulted first by
# getAchievementFactory(). Entries keyed by WHITE_TIGER_RECORD and
# RARE_STORAGE_RECORD use whole-record keys rather than (block, name) pairs.
_ACHIEVEMENTS_BY_NAME = {(_AB.TOTAL, 'tankExpert'): _NationAchieveFactory.get(_as.TankExpertAchievement),
 (_AB.TOTAL, 'mechanicEngineer'): _NationAchieveFactory.get(_as.MechEngineerAchievement),
 (_AB.TOTAL, 'mousebane'): _CustomAchieveFactory.get(_as.MousebaneAchievement),
 (_AB.TOTAL, 'beasthunter'): _CustomAchieveFactory.get(_as.BeasthunterAchievement),
 (_AB.TOTAL, 'pattonValley'): _CustomAchieveFactory.get(_as.PattonValleyAchievement),
 (_AB.TOTAL, 'sinai'): _CustomAchieveFactory.get(_as.SinaiAchievement),
 (_AB.TOTAL, 'markOfMastery'): _CustomAchieveFactory.get(_as.MarkOfMasteryAchievement),
 (_AB.TOTAL, 'medalKnispel'): _CustomAchieveFactory.get(_as.MedalKnispelAchievement),
 (_AB.TOTAL, 'medalCarius'): _CustomAchieveFactory.get(_as.MedalCariusAchievement),
 (_AB.TOTAL, 'medalAbrams'): _CustomAchieveFactory.get(_as.MedalAbramsAchievement),
 (_AB.TOTAL, 'medalPoppel'): _CustomAchieveFactory.get(_as.MedalPoppelAchievement),
 (_AB.TOTAL, 'medalKay'): _CustomAchieveFactory.get(_as.MedalKayAchievement),
 (_AB.TOTAL, 'medalEkins'): _CustomAchieveFactory.get(_as.MedalEkinsAchievement),
 (_AB.TOTAL, 'medalLeClerc'): _CustomAchieveFactory.get(_as.MedalLeClercAchievement),
 (_AB.TOTAL, 'medalLavrinenko'): _CustomAchieveFactory.get(_as.MedalLavrinenkoAchievement),
 (_AB.TOTAL, 'marksOnGun'): _CustomAchieveFactory.get(_as.MarkOnGunAchievement),
 (_AB.TOTAL, 'sniper'): _AchieveFactory.get(_abstract_achievements.DeprecatedAchievement),
 (_AB.TOTAL, 'medalWittmann'): _AchieveFactory.get(_abstract_achievements.DeprecatedAchievement),
 (_AB.TOTAL, 'reliableComrade'): _CustomAchieveFactory.get(_as.ReliableComradeAchievement),
 (_AB.TOTAL, 'readyForBattleLT'): _CustomAchieveFactory.get(_as.ReadyForBattleLTAchievement),
 (_AB.TOTAL, 'readyForBattleMT'): _CustomAchieveFactory.get(_as.ReadyForBattleMTAchievement),
 (_AB.TOTAL, 'readyForBattleHT'): _CustomAchieveFactory.get(_as.ReadyForBattleHTAchievement),
 (_AB.TOTAL, 'readyForBattleSPG'): _CustomAchieveFactory.get(_as.ReadyForBattleSPGAchievement),
 (_AB.TOTAL, 'readyForBattleATSPG'): _CustomAchieveFactory.get(_as.ReadyForBattleATSPGAchievement),
 (_AB.TOTAL, 'testartilleryman'): _AchieveFactory.get(_as.Achieved),
 (_AB.CLAN, 'medalRotmistrov'): _CustomAchieveFactory.get(_as.MedalRotmistrovAchievement),
 (_AB.RATED_7X7, 'strategicOperations'): _CustomAchieveFactory.get(_as.StrategicOperationsAchievement),
 (_AB.FORT, 'fireAndSword'): _CustomAchieveFactory.get(_as.FireAndSwordAchievement),
 (_AB.FORT, 'soldierOfFortune'): _CustomAchieveFactory.get(_as.SoldierOfFortuneAchievement),
 (_AB.FORT, 'kampfer'): _CustomAchieveFactory.get(_as.KampferAchievement),
 (_AB.FORT, 'conqueror'): _CustomAchieveFactory.get(_as.ConquerorAchievement),
 (_AB.HISTORICAL, 'makerOfHistory'): _CustomAchieveFactory.get(_as.MakerOfHistoryAchievement),
 (_AB.HISTORICAL, 'guardsman'): _CustomAchieveFactory.get(_as.GuardsmanAchievement),
 (_AB.SINGLE, 'diehard'): _CustomAchieveFactory.get(_as.DiehardAchievement),
 (_AB.SINGLE, 'invincible'): _CustomAchieveFactory.get(_as.InvincibleAchievement),
 (_AB.SINGLE, 'tacticalBreakthrough'): _CustomAchieveFactory.get(_as.TacticalBreakthroughAchievement),
 (_AB.SINGLE, 'handOfDeath'): _CustomAchieveFactory.get(_as.HandOfDeathAchievement),
 (_AB.SINGLE, 'armorPiercer'): _CustomAchieveFactory.get(_as.ArmorPiercerAchievement),
 (_AB.SINGLE, 'titleSniper'): _CustomAchieveFactory.get(_as.TitleSniperAchievement),
 (_AB.SINGLE, 'victoryMarch'): _CustomAchieveFactory.get(_as.VictoryMarchAchievement),
 (_AB.SINGLE_7X7, 'victoryMarch'): _CustomAchieveFactory.get(_as.VictoryMarchClubAchievement),
 (_AB.SINGLE, 'battleCitizen'): _AchieveFactory.get(_abstract_achievements.QuestAchievement),
 (_AB.SINGLE, 'WFC2014'): _CustomAchieveFactory.get(_as.WFC2014Achievement),
 (_AB.SINGLE, 'deathTrack'): _CustomAchieveFactory.get(_as.DeathTrackAchievement),
 (_AB.SINGLE, 'aimer'): _CustomAchieveFactory.get(_as.AimerAchievement),
 (_AB.SINGLE, 'tankwomen'): _CustomAchieveFactory.get(_as.TankwomenAchievement),
 (_AB.SINGLE, 'operationWinter'): _AchieveFactory.get(_abstract_achievements.QuestAchievement),
 (_AB.SINGLE, 'fallout'): _AchieveFactory.get(_as.Achieved),
 (_AB.SINGLE, 'fallout2'): _AchieveFactory.get(_as.Achieved),
 (_AB.SINGLE, 'falloutSingleWolf'): _AchieveFactory.get(_as.Achieved),
 (_AB.SINGLE, 'falloutPackOfWolfs'): _AchieveFactory.get(_as.Achieved),
 (_AB.SINGLE, 'falloutSteelHunter'): _AchieveFactory.get(_as.Achieved),
 (_AB.SINGLE, 'falloutAlwaysInLine'): _AchieveFactory.get(_as.Achieved),
 (_AB.TEAM_7X7, 'geniusForWarMedal'): _CustomAchieveFactory.get(_as.GeniusForWarAchievement),
 (_AB.TEAM_7X7, 'wolfAmongSheepMedal'): _CustomAchieveFactory.get(_as.WolfAmongSheepAchievement),
 (_AB.TEAM_7X7, 'fightingReconnaissanceMedal'): _CustomAchieveFactory.get(_as.FightingReconnaissanceAchievement),
 (_AB.TEAM_7X7, 'crucialShotMedal'): _CustomAchieveFactory.get(_as.CrucialShotAchievement),
 (_AB.TEAM_7X7, 'forTacticalOperations'): _CustomAchieveFactory.get(_as.ForTacticalOperationsAchievement),
 (_AB.TEAM_7X7, 'battleTested'): _CustomAchieveFactory.get(_as.BattleTestedAchievement),
 (_AB.TEAM_7X7, 'guerrillaMedal'): _CustomAchieveFactory.get(_as.GuerrillaAchievement),
 (_AB.TEAM_7X7, 'infiltratorMedal'): _CustomAchieveFactory.get(_as.InfiltratorAchievement),
 (_AB.TEAM_7X7, 'sentinelMedal'): _CustomAchieveFactory.get(_as.SentinelAchievement),
 (_AB.TEAM_7X7, 'prematureDetonationMedal'): _CustomAchieveFactory.get(_as.PrematureDetonationAchievement),
 (_AB.TEAM_7X7, 'bruteForceMedal'): _CustomAchieveFactory.get(_as.BruteForceAchievement),
 (_AB.TEAM_7X7, 'promisingFighterMedal'): _CustomAchieveFactory.get(_as.PromisingFighterAchievement),
 (_AB.TEAM_7X7, 'heavyFireMedal'): _CustomAchieveFactory.get(_as.HeavyFireAchievement),
 (_AB.TEAM_7X7, 'rangerMedal'): _CustomAchieveFactory.get(_as.RangerAchievement),
 (_AB.TEAM_7X7, 'fireAndSteelMedal'): _CustomAchieveFactory.get(_as.FireAndSteelAchievement),
 (_AB.TEAM_7X7, 'pyromaniacMedal'): _CustomAchieveFactory.get(_as.PyromaniacAchievement),
 (_AB.UNIQUE, 'histBattle1_battlefield'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle2_battlefield'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle3_battlefield'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle4_battlefield'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle5_battlefield'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle6_battlefield'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle1_historyLessons'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle2_historyLessons'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle3_historyLessons'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle4_historyLessons'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle5_historyLessons'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.UNIQUE, 'histBattle6_historyLessons'): _AchieveFactory.get(_abstract_achievements.HistoricalAchievement),
 (_AB.FALLOUT, 'stormLord'): _CustomAchieveFactory.get(_as.StormLordAchievement),
 (_AB.FALLOUT, 'winnerLaurels'): _CustomAchieveFactory.get(_as.WinnerLaurelsAchievement),
 (_AB.FALLOUT, 'sauronEye'): _CustomAchieveFactory.get(_as.SauronsEyeAchievement),
 WHITE_TIGER_RECORD: _CustomAchieveFactory.get(_as.WhiteTigerAchievement),
 RARE_STORAGE_RECORD: _RareAchievesFactory.get()}
# Register one nation-scoped tankExpert<N>/mechanicEngineer<N> entry per
# nation index, so per-nation records resolve to nation-aware factories.
for _nID, _ in enumerate(nations.NAMES):
    _ACHIEVEMENTS_BY_NAME[_AB.TOTAL, 'tankExpert%d' % _nID] = _NationAchieveFactory.get(_as.TankExpertAchievement, _nID)
    _ACHIEVEMENTS_BY_NAME[_AB.TOTAL, 'mechanicEngineer%d' % _nID] = _NationAchieveFactory.get(_as.MechEngineerAchievement, _nID)
def getAchievementFactory(record, dossier = None):
    """Resolve the factory for *record*, a (block, name) pair.

    Lookup precedence: exact (block, name) override, then the achievement's
    declared type, then a whole-block default, finally the generic
    RegularAchievement factory.
    """
    achieveType = getAchieveType(record)
    block, name = record[0], record[1]
    if record in _ACHIEVEMENTS_BY_NAME:
        factoryMaker = _ACHIEVEMENTS_BY_NAME[record]
    else:
        factoryMaker = _ACHIEVEMENTS_BY_TYPE.get(achieveType)
        if factoryMaker is None:
            factoryMaker = _ACHIEVEMENTS_BY_BLOCK.get(block)
        if factoryMaker is None:
            factoryMaker = _AchieveFactory.get(_abstract_achievements.RegularAchievement)
    return factoryMaker(name, block, dossier)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\shared\gui_items\dossier\factories.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:41:35 Střední Evropa (běžný čas)
| [
"info@webium.sk"
] | info@webium.sk |
def print_pattern(n=5):
    """Print an n-row, 2*n-column letter pattern.

    Row 1 prints chr(n + 64) (e.g. 'E' for n=5) in column n; each following
    row prints the previous letter one column further out on both sides,
    ending with 'A' at the outer edges.

    :param n: number of rows; also selects the starting letter (A=1 .. Z=26).
    """
    left = n
    right = n
    for _ in range(n):
        row_char = chr(left + 64)  # same letter appears on both sides of a row
        cells = [
            row_char if col in (left, right) else " "
            for col in range(1, n * 2 + 1)
        ]
        print("".join(cells))
        left -= 1
        right += 1


# Preserve the original script behaviour: print the 5-row pattern.
print_pattern(5)
| [
"noreply@github.com"
] | nish235.noreply@github.com |
07b601a95a976174ad12c9cdc6438dbe30736569 | 8f24e443e42315a81028b648e753c50967c51c78 | /release/k8s_tests/run_gcs_ft_on_k8s.py | 88997663a45da106ea15bb41777657025fdf1466 | [
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | simon-mo/ray | d07efdada8d05c6e10417f96e8dfc35f9ad33397 | 1e42e6cd15e2fb96c217cba8484e59ed0ef4b0c8 | refs/heads/master | 2023-03-06T00:09:35.758834 | 2022-12-23T18:46:48 | 2022-12-23T18:46:48 | 122,156,396 | 4 | 2 | Apache-2.0 | 2023-03-04T08:56:56 | 2018-02-20T04:47:06 | Python | UTF-8 | Python | false | false | 12,176 | py | import enum
import json
import subprocess
from kubernetes import client, config, watch
import requests
import random
import uuid
import pathlib
import time
import ray
import os
# Global identifiers for the cluster under test; populated by
# generate_cluster_variable() before any resources are created.
CLUSTER_ID = None
RAY_CLUSTER_NAME = None
RAY_SERVICE_NAME = None
LOCUST_ID = None
# Kill node type: which kind of Ray pod the fault-tolerance test terminates.
class TestScenario(enum.Enum):
    """Fault-injection scenarios for the GCS fault-tolerance test."""
    KILL_WORKER_NODE = "kill_worker_node"
    KILL_HEAD_NODE = "kill_head_node"
# Resolve which Ray container image the RayService pods should run:
# 1. an explicit override via the RAY_IMAGE environment variable,
# 2. the matching released image for a non-dev local Ray version,
# 3. "nightly" when the commit placeholder was never substituted,
# 4. otherwise the image tagged with the local commit's short SHA.
if os.environ.get("RAY_IMAGE") is not None:
    ray_image = os.environ.get("RAY_IMAGE")
elif ray.__version__ != "3.0.0.dev0":
    ray_image = f"rayproject/ray:{ray.__version__}"
elif ray.__commit__ == "{{RAY_COMMIT_SHA}}":
    ray_image = "rayproject/ray:nightly"
else:
    ray_image = f"rayproject/ray:{ray.__commit__[:6]}"
# Kubernetes client configured from the local kubeconfig (same cluster kubectl uses).
config.load_kube_config()
cli = client.CoreV1Api()
# Path where start_rayservice() writes the rendered RayService manifest.
yaml_path = pathlib.Path("/tmp/ray_v1alpha1_rayservice.yaml")
def generate_cluster_variable():
    """Generate fresh global identifiers for one test run.

    Sets CLUSTER_ID to 8 random hex characters (the first group of a
    uuid4) and derives the cluster, service and locust resource names
    from it.
    """
    global CLUSTER_ID
    global RAY_CLUSTER_NAME
    global RAY_SERVICE_NAME
    global LOCUST_ID
    CLUSTER_ID = uuid.uuid4().hex[:8]
    RAY_CLUSTER_NAME = f"cluster-{CLUSTER_ID}"
    RAY_SERVICE_NAME = f"service-{CLUSTER_ID}"
    LOCUST_ID = f"ray-locust-{CLUSTER_ID}"
def check_kuberay_installed():
    """Install (idempotently) the pinned KubeRay operator and verify its pods.

    Applies the KubeRay kustomize manifest with ``kubectl apply -k`` and then
    asserts that the ``ray-system`` namespace lists at least one pod.

    :raises AssertionError: if no pods are listed in the ray-system namespace.
    :raises subprocess.CalledProcessError: if a kubectl invocation fails.
    """
    KUBERAY_VERSION = "v0.3.0"
    uri = (
        "github.com/ray-project/kuberay/manifests"
        f"/base?ref={KUBERAY_VERSION}&timeout=90s"
    )
    print(
        subprocess.check_output(
            [
                "kubectl",
                "apply",
                "-k",
                uri,
            ]
        ).decode()
    )
    pods = subprocess.check_output(
        ["kubectl", "get", "pods", "--namespace", "ray-system", "--no-headers"]
    ).decode()
    # BUGFIX: the original `assert pods.split("\n") != 0` compared a list with
    # an int, so it always passed. Require a non-empty pod listing instead.
    assert pods.strip(), "no KubeRay operator pods found in the ray-system namespace"
def start_rayservice():
    """Render the RayService manifest, create it, and wait until it serves.

    Steps:
      1. Inline solution.py / locustfile.py into the YAML template and render
         it with the current cluster id and Ray image.
      2. ``kubectl create`` the rendered manifest.
      3. Watch for the head pod to reach the Running phase (60 s watch timeout).
      4. Poll the sample Serve endpoint inside the head pod until it returns
         the expected value ("375").

    NOTE(review): step 4 retries forever; there is no overall timeout.
    """
    # step-1: generate the yaml file
    print(f"Using ray image: {ray_image}")
    # The embedded files are re-indented by four spaces so they nest correctly
    # under the templated YAML key.
    solution = "\n".join(
        [
            f"    {line}"
            for line in pathlib.Path("./solution.py").read_text().splitlines()
        ]
    )
    locustfile = "\n".join(
        [
            f"    {line}"
            for line in pathlib.Path("./locustfile.py").read_text().splitlines()
        ]
    )
    template = (
        pathlib.Path("ray_v1alpha1_rayservice_template.yaml")
        .read_text()
        .format(
            cluster_id=CLUSTER_ID,
            ray_image=ray_image,
            solution=solution,
            locustfile=locustfile,
        )
    )
    print("=== YamlFile ===")
    print(template)
    tmp_yaml = pathlib.Path("/tmp/ray_v1alpha1_rayservice.yaml")
    tmp_yaml.write_text(template)
    print("=== Get Pods from ray-system ===")
    print(
        subprocess.check_output(
            ["kubectl", "get", "pods", "--namespace", "ray-system", "--no-headers"]
        ).decode()
    )
    # step-2: create the cluster
    print(f"Creating cluster with id: {CLUSTER_ID}")
    print(subprocess.check_output(["kubectl", "create", "-f", str(tmp_yaml)]).decode())
    # step-3: make sure the ray cluster is up
    w = watch.Watch()
    start_time = time.time()
    head_pod_name = None
    for event in w.stream(
        func=cli.list_namespaced_pod,
        namespace="default",
        label_selector=f"rayCluster={RAY_CLUSTER_NAME},ray.io/node-type=head",
        timeout_seconds=60,
    ):
        if event["object"].status.phase == "Running":
            assert event["object"].kind == "Pod"
            head_pod_name = event["object"].metadata.name
            end_time = time.time()
            print(f"{CLUSTER_ID} started in {end_time-start_time} sec")
            print(f"head pod {head_pod_name}")
            break
    assert head_pod_name is not None
    # step-4: e2e check it's alive
    cmd = """
import requests
print(requests.get('http://localhost:8000/?val=123').text)
"""
    while True:
        try:
            resp = (
                subprocess.check_output(
                    f'kubectl exec {head_pod_name} -- python -c "{cmd}"', shell=True
                )
                .decode()
                .strip()
            )
            # The sample Serve app returns 375 for val=123 (see solution.py).
            if resp == "375":
                print("Service is up now!")
                break
            else:
                print(f"Failed with msg {resp}")
        except Exception as e:
            # Best-effort retry: kubectl exec fails while Serve is still starting.
            print("Error", e)
        time.sleep(2)
def start_port_forward():
    """Port-forward the serve service to localhost:8000 and block until it answers.

    Returns the port-forward subprocess so the caller can kill it later.
    """
    proc = subprocess.Popen(
        [
            "kubectl",
            "port-forward",
            f"svc/{RAY_SERVICE_NAME}-serve-svc",
            "8000:8000",
            "--address=0.0.0.0",
        ]
    )
    # Poll until the proxy responds; timeouts and connection errors simply
    # mean the tunnel is not up yet.
    while True:
        try:
            resp = requests.get(
                "http://localhost:8000/",
                timeout=1,
                params={"val": 10},
            )
        except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
            pass
        else:
            if resp.status_code == 200:
                print("The ray service is ready!!!")
                break
        print("Waiting for the proxy to be alive")
        time.sleep(1)
    return proc
def warmup_cluster(num_reqs):
    """Fire `num_reqs` warm-up requests at the service, asserting each returns 200."""
    url = "http://localhost:8000/"
    payload = {"val": 10}
    for _ in range(num_reqs):
        response = requests.get(url, timeout=1, params=payload)
        assert response.status_code == 200
def start_sending_traffics(duration, users):
    """Deploy a locust load generator via helm and port-forward its web UI.

    duration: length of the load test in seconds
    users: number of simulated locust users
    Returns the port-forward subprocess (locust UI on localhost:8080).
    """
    print("=== Install locust by helm ===")
    yaml_config = (
        pathlib.Path("locust-run.yaml")
        .read_text()
        .format(users=users, cluster_id=CLUSTER_ID, duration=int(duration))
    )
    print("=== Locust YAML ===")
    print(yaml_config)
    pathlib.Path("/tmp/locust-run-config.yaml").write_text(yaml_config)
    helm_install_logs = subprocess.check_output(
        [
            "helm",
            "install",
            LOCUST_ID,
            "deliveryhero/locust",
            "-f",
            "/tmp/locust-run-config.yaml",
        ]
    )
    print(helm_install_logs)
    # Wait (up to 5 minutes, in 30s steps) for the locust master pod to leave
    # the Pending phase before port-forwarding to it.
    timeout_wait_for_locust_s = 300
    while timeout_wait_for_locust_s > 0:
        labels = [
            f"app.kubernetes.io/instance=ray-locust-{CLUSTER_ID}",
            "app.kubernetes.io/name=locust,component=master",
        ]
        pods = cli.list_namespaced_pod("default", label_selector=",".join(labels))
        assert len(pods.items) == 1
        if pods.items[0].status.phase == "Pending":
            print("Waiting for the locust pod to be ready...")
            time.sleep(30)
            timeout_wait_for_locust_s -= 30
        else:
            break
    proc = subprocess.Popen(
        [
            "kubectl",
            "port-forward",
            f"svc/ray-locust-{CLUSTER_ID}",
            "8080:8089",
            "--address=0.0.0.0",
        ]
    )
    return proc
def dump_pods_actors(pod_name):
    """Print the actor processes (ps lines containing '::') running in a pod."""
    result = subprocess.run(
        f"kubectl exec {pod_name} -- ps -ef | grep ::",
        shell=True,
        capture_output=True,
    )
    print(result.stdout.decode())
def kill_head():
    """Delete the Ray cluster's head pod if it is currently running."""
    pods = cli.list_namespaced_pod(
        "default",
        label_selector=f"rayCluster={RAY_CLUSTER_NAME},ray.io/node-type=head",
    )
    head = pods.items[0]
    if head.status.phase == "Running":
        print(f"Killing header {head.metadata.name}")
        dump_pods_actors(head.metadata.name)
        cli.delete_namespaced_pod(head.metadata.name, "default")
def kill_worker():
    """Delete one randomly chosen running worker pod of the Ray cluster."""
    pods = cli.list_namespaced_pod(
        "default",
        label_selector=f"rayCluster={RAY_CLUSTER_NAME},ray.io/node-type=worker",
    )
    alive_pods = [
        (p.status.start_time, p.metadata.name)
        for p in pods.items
        if p.status.phase == "Running"
    ]
    # Pick a random running worker.  (Earlier revisions always killed the
    # oldest node to work around a serve memory leak.)
    to_be_killed = random.choice(alive_pods)[1]
    print(f"Killing worker {to_be_killed}")
    # BUG FIX: dump the actors of the pod we are about to kill, not of
    # pods.items[0], which may be a different (surviving) worker.
    dump_pods_actors(to_be_killed)
    cli.delete_namespaced_pod(to_be_killed, "default")
def start_killing_nodes(duration, kill_interval, kill_node_type):
    """Kill the nodes in ray cluster.

    duration: how long the test runs (seconds)
    kill_interval: the interval between two kills (seconds)
    kill_node_type: kill either worker node or head node (TestScenario member)
    """
    # NOTE(review): starting the range at 1 means only
    # int(duration / kill_interval) - 1 kills happen; confirm the off-by-one
    # is intentional (it leaves the final interval kill-free).
    for kill_idx in range(1, int(duration / kill_interval)):
        while True:
            try:
                # kill
                if kill_node_type == TestScenario.KILL_HEAD_NODE:
                    kill_head()
                elif kill_node_type == TestScenario.KILL_WORKER_NODE:
                    kill_worker()
                break
            except Exception as e:
                from time import sleep
                # Retry: the API server may briefly refuse while pods churn.
                print(f"Fail to kill node, retry in 5 seconds: {e}")
                sleep(5)
        time.sleep(kill_interval)
def get_stats():
    """Copy locust's stats-history CSV out of the master pod and summarize it.

    Returns (availability, throughput, raw_rows), computed over the window
    after a 5 minute warm-up period.
    """
    labels = [
        f"app.kubernetes.io/instance=ray-locust-{CLUSTER_ID}",
        "app.kubernetes.io/name=locust,component=master",
    ]
    pods = cli.list_namespaced_pod("default", label_selector=",".join(labels))
    assert len(pods.items) == 1
    pod_name = pods.items[0].metadata.name
    subprocess.check_output(
        [
            "kubectl",
            "cp",
            f"{pod_name}:/home/locust/test_result_{CLUSTER_ID}_stats_history.csv",
            "./stats_history.csv",
        ]
    )
    import csv
    with open("stats_history.csv") as f:
        data = list(csv.reader(f))
    # The first 5 minutes (300 one-second samples) are treated as warm-up.
    offset = 300
    start_time = int(data[offset][0])
    end_time = int(data[-1][0])
    # Column 17 holds cumulative total requests; column 18 cumulative failures.
    total = float(data[-1][17]) - float(data[offset][17])
    failures = float(data[-1][18]) - float(data[offset][18])
    availability = (total - failures) / total
    throughput = total / (end_time - start_time)
    return availability, throughput, data
def main():
    """Run the worker-kill and head-kill HA scenarios and write a JSON report.

    Each scenario gets a fresh cluster; resources are torn down in `finally`
    even when the scenario fails.
    """
    result = {
        TestScenario.KILL_WORKER_NODE.value: {"rate": None},
        TestScenario.KILL_HEAD_NODE.value: {"rate": None},
    }
    # BUG FIX: the original dict listed KILL_HEAD_NODE twice, so the
    # worker-kill scenario had no availability threshold at all.
    expected_result = {
        TestScenario.KILL_WORKER_NODE: 0.99,
        TestScenario.KILL_HEAD_NODE: 0.99,
    }
    check_kuberay_installed()
    users = 60
    for kill_node_type, kill_interval, test_duration in [
        (TestScenario.KILL_WORKER_NODE, 60, 600),
        (TestScenario.KILL_HEAD_NODE, 300, 1200),
    ]:
        # BUG FIX: initialize before the try block so the finally clause
        # cannot raise NameError when setup fails early.
        procs = []
        try:
            generate_cluster_variable()
            start_rayservice()
            procs.append(start_port_forward())
            warmup_cluster(200)
            procs.append(start_sending_traffics(test_duration * 1.1, users))
            start_killing_nodes(test_duration, kill_interval, kill_node_type)
            rate, qps, data = get_stats()
            print("Raw Data", data, qps)
            result[kill_node_type.value]["rate"] = rate
            assert expected_result[kill_node_type] <= rate
            # Sanity-check throughput: at least 80% of the nominal 10 req/s/user.
            assert qps > users * 10 * 0.8
        except Exception as e:
            print(f"{kill_node_type} HA test failed, {e}")
        finally:
            print("=== Cleanup ===")
            subprocess.run(
                ["kubectl", "delete", "-f", str(yaml_path)],
                capture_output=True,
            )
            subprocess.run(
                ["helm", "uninstall", LOCUST_ID],
                capture_output=True,
            )
            for p in procs:
                p.kill()
            print("==== Cleanup done ===")
    print("Result:", result)
    test_output_json_path = os.environ.get(
        "TEST_OUTPUT_JSON", "/tmp/release_test_output.json"
    )
    with open(test_output_json_path, "wt") as f:
        json.dump(result, f)
if __name__ == "__main__":
    try:
        # Connect to ray so that the cluster's auto-suspend
        # will not start while the test runs.
        ray.init("auto")
    except Exception:
        # It doesn't matter if the connection failed.
        pass
    main()
| [
"noreply@github.com"
] | simon-mo.noreply@github.com |
7b52b03525110b80daf809be6771e62af13a65de | b1ddcf4bac9ca603a7a2333912eb29da8bf2cb7b | /ReadOnlyModelViewset/ReadOnlyModelViewset/wsgi.py | f5e6d48aa8a7cf1cd91df179630338e674686911 | [] | no_license | sankethalake/django_practice | e9477ae0beee4923cd6758cc6d37517ea5979610 | 9877304f0c6415ae8979e5cc13a49559155fdd9d | refs/heads/main | 2023-07-07T07:07:35.598657 | 2021-08-14T06:26:23 | 2021-08-14T06:26:23 | 389,917,128 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | """
WSGI config for ReadOnlyModelViewset project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Must be set before get_wsgi_application() imports the settings module.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ReadOnlyModelViewset.settings')
# Module-level WSGI callable that application servers (gunicorn, uwsgi, ...) look for.
application = get_wsgi_application()
| [
"sankethalake@gmail.com"
] | sankethalake@gmail.com |
eef03c572aa4c91aa3ca9b0b876c4ab34a9a2947 | 677ccdf2a0459179f03ef543c83d52f3b3174b98 | /django_app/wishlist/apis/wishlist.py | 060cb3d166eb89bc386295e20f2cbc039960e8f1 | [] | no_license | wps5-airbnb/airbnb | 6c4de1d5d6e7f9b2bd9190a9a84b8e19e845b761 | 68068def521bd911fbc462cb10eae23aa7b9b436 | refs/heads/master | 2021-01-01T16:18:43.854381 | 2018-10-29T06:22:18 | 2018-10-29T06:22:18 | 97,808,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,374 | py | from rest_framework import authentication, status
from rest_framework.response import Response
from rest_framework.views import APIView
from house.models import House
from house.serializers.house import HouseSerializer
from ..models import Wishlist
# Names exported when this module is star-imported.
__all__ = [
    'WishlistView',
]
class WishlistView(APIView):
    """Toggle (POST) and list (GET) the authenticated user's liked houses."""
    authentication_classes = (authentication.TokenAuthentication,)

    def post(self, request):
        """Toggle the like state of the house given by the ?house=<pk> param."""
        user = request.user
        house = House.objects.get(pk=int(request.query_params['house']))
        wishlist_item, created = Wishlist.objects.get_or_create(
            liker=user,
            house=house,
        )
        if created:
            return Response(
                '{}유저가 pk={}인 하우스를 좋아합니다.'.format(user.username, house.pk),
                status=status.HTTP_201_CREATED,
            )
        # Already liked: a second POST removes the like (toggle semantics).
        wishlist_item.delete()
        return Response(
            '{}유저가 pk={}인 하우스의 좋아요를 취소하였습니다.'.format(user.username, house.pk),
            status=status.HTTP_200_OK,
        )

    def get(self, request):
        """Return the user's liked houses, most recently liked first."""
        liked_houses = request.user.get_wishlist.order_by(
            'wishlist__created_date'
        ).reverse()
        return Response(HouseSerializer(liked_houses, many=True).data)
| [
"gaius827@gmail.com"
] | gaius827@gmail.com |
3b8f928c5a21247a282fb340146ff95376ee343a | ad05a747dc8f23822c51b02486b29c5cd2001369 | /releases/cws_toolbox/cws_tbx_1.4.6/cws_toolbox/transform_lidar/remove_blanks.py | 107baf1cd1f503a0dda18928dbbd91e908e0ea23 | [] | no_license | nickrsan/sierra-code-library | 9e2bcb51135b2f33c1ff4230ec21c0f30ab19c2c | 91abf016b63da901d4cc182f4761fe1d7f46cfe4 | refs/heads/master | 2022-04-11T10:53:17.439366 | 2020-03-24T00:32:06 | 2020-03-24T00:32:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,043 | py | import sys
import arcpy
from cws_toolbox.transform_lidar.cwslidar import *
class func_wrapper:
    """Filter for LiDAR text data: drops point-count lines, tracks intensity."""

    def __init__(self):
        pass

    def processor(self, data, mm, output=None):  # mm is the minmax object
        """Write a header, copy data lines to `output`, and track intensity min/max.

        NOTE(review): `output` defaults to None but is written to
        unconditionally - callers must always pass a writable file object.
        """
        output.write("x y z i r g b\n")
        for line in data:
            match = re.search('^(\d*)\s*$', line)  # line holding only digits
            if match is not None and match.group(0) is not None:
                continue  # point-count header line - drop it
            else:
                try:
                    # Fourth whitespace-separated field (sign included) is intensity.
                    newmatch = re.search('^\d+\.?\d*\s+\d+\.?\d*\s+\d+\.?\d*\s+(-?\d+)\s+', line)
                    if newmatch is not None and newmatch.group(1) is not None:
                        mm.track(newmatch.group(1))
                    else:
                        log("Problem matching line for intensity minmax")
                except Exception:
                    # BUG FIX: was a bare `except:` that also swallowed
                    # SystemExit/KeyboardInterrupt.
                    log("Problem reading and scaling intensity")
            output.write("%s" % line)
        log(mm.report(return_text=True))
# Script entry: read the output directory from the ArcGIS tool parameters,
# then run the blank-removal processor over the LiDAR data.
output_dir = arcpy.GetParameterAsText(1)
setup(output_dir)
blanker = func_wrapper()
process_data(blanker)
shutdown() | [
"nickrsan@users.noreply.github.com"
] | nickrsan@users.noreply.github.com |
95f0622de154b99f837e5337ee28442eac324b12 | bb7909c8b1906d43836d798e3bf6e5d3aa12a2ea | /groovebox/utils/__init__.py | 2d84afcc8d1cf4cd0542eec3dbb20bca74d8ac5c | [] | no_license | thegroovebox/api.groovebox.org | 9e55002f22196ae4a94b05d3e6297de0d9d33c4f | 99701d24e686b5d2d0c339e5dbe6cb74a939b3d9 | refs/heads/master | 2021-01-22T14:11:44.163885 | 2016-07-30T07:47:21 | 2016-07-30T07:47:21 | 38,858,649 | 4 | 2 | null | 2016-07-30T07:47:21 | 2015-07-10T03:22:56 | Python | UTF-8 | Python | false | false | 1,795 | py | #!/usr/bin/env python
# -*-coding: utf-8 -*-
"""
utils
~~~~~
Various utilities (not groovebox specific)
:copyright: (c) 2015 by Mek
:license: see LICENSE for more details.
"""
from datetime import datetime, date
import json
import string
def subdict(d, keys, required=True):
    """Return a new dict restricted to `keys`.

    With required=True (the default) every key must exist in `d` (KeyError
    otherwise); with required=False, missing keys are silently dropped.
    """
    if required:
        return {k: d[k] for k in keys}
    return {k: d[k] for k in keys if k in d}
def time2sec(t):
    """Convert "M:SS" to integer seconds; otherwise strip any fractional part.

    Note the mixed return types (preserved for compatibility): "M:SS" input
    yields an int, every other input yields a string.
    """
    text = str(t)
    if ":" in text:
        minutes, seconds = text.split(":")
        return 60 * int(minutes) + int(seconds)
    if "." in text:
        return text.split(".")[0]
    return text
class DatetimeEncoder(json.JSONEncoder):
    """JSONEncoder that serializes datetime/date objects as ISO-8601 strings."""

    def default(self, obj):
        # datetime must be checked first: it is a subclass of date.
        if isinstance(obj, datetime):
            return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
        if isinstance(obj, date):
            return obj.strftime('%Y-%m-%d')
        return super().default(obj)
class Numcoder(object):
    """Bidirectional base-62 codec for non-negative integers."""

    ALPHABET = string.ascii_uppercase + string.ascii_lowercase + \
        string.digits
    ALPHABET_REVERSE = {c: i for (i, c) in enumerate(ALPHABET)}
    BASE = len(ALPHABET)

    @classmethod
    def encode_many(cls, *ns, delim="$"):
        """Encode several integers, joining the encodings with `delim`."""
        return delim.join(str(cls.encode(n)) for n in ns)

    @classmethod
    def encode(cls, n):
        """Encode a non-negative integer as a base-62 string ("A" for 0)."""
        digits = []
        while True:
            n, remainder = divmod(n, cls.BASE)
            digits.append(cls.ALPHABET[remainder])
            if n == 0:
                break
        digits.reverse()
        return ''.join(digits)

    @classmethod
    def decode_many(cls, n, delim='$'):
        """Decode a `delim`-separated string of base-62 numbers into a list."""
        return [cls.decode(part) for part in n.split(delim)]

    @classmethod
    def decode(cls, s):
        """Decode a base-62 string back into an integer."""
        value = 0
        for ch in s:
            value = value * cls.BASE + cls.ALPHABET_REVERSE[ch]
        return value
| [
"michael.karpeles@gmail.com"
] | michael.karpeles@gmail.com |
32d7879356d81e175fd2276ed3fce8cab3d00e97 | 34ed44cdbbb641c6ace37b5caa05a850b5928980 | /impression_client/backends.py | 1ac8234b840f4300a722991080bc09b743d9a274 | [
"MIT"
] | permissive | gregschmit/django-impression-client | 627dfe89a2053e9965dc7e3ad8069e16dad55e4f | a2f4328024a67865eccaeff79567320842ab5d5c | refs/heads/master | 2020-12-01T13:35:35.092702 | 2019-12-29T02:17:15 | 2019-12-29T02:17:15 | 230,642,917 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | """
This module implements our remote email backend.
"""
import requests
from django.core.mail.backends.base import BaseEmailBackend
from .settings import get_setting
class RemoteEmailBackend(BaseEmailBackend):
    """
    This backend sends a RESTful request to the target Impression server, and allows
    that remote installation of Impression to send the email(s). This backend will send
    the remote server the raw from/to/cc/bcc fields, however it's up to the remote
    service if it will trust you enough to use these fields.
    """

    @staticmethod
    def send_message(message):
        """
        Send a RESTful request to the target impression server and return the response.
        """
        # Prefer the database-configured target/token; fall back to settings
        # when the model layer is unavailable (RuntimeError).
        try:
            from .models import RemoteImpressionServer
            target, token = RemoteImpressionServer.get_target_and_token()
        except RuntimeError:
            target = get_setting("IMPRESSION_CLIENT_DEFAULT_TARGET")
            token = get_setting("IMPRESSION_CLIENT_DEFAULT_TOKEN")

        # build headers
        headers = {"Authorization": "Token {}".format(token)}

        # Determine if we should interpret the first address in "to" as the
        # service: an entry without "@" is a service name, not an address.
        # (Idiom fix: `"@" not in x` instead of `not "@" in x`, PEP 8 / E713.)
        if message.to and "@" not in message.to[0]:
            service_name = message.to[0]
            to_emails = message.to[1:]
        else:
            service_name = get_setting("IMPRESSION_CLIENT_DEFAULT_SERVICE")
            to_emails = message.to

        # send the request
        payload = {
            "service_name": service_name,
            "subject": message.subject,
            "body": message.body,
            "from": message.from_email,
            "to": to_emails or [],
        }
        if message.cc:
            payload["cc"] = message.cc
        if message.bcc:
            payload["bcc"] = message.bcc
        return requests.post(target, data=payload, headers=headers)

    def send_messages(self, email_messages):
        """
        For each email message, send RESTful request to the remote server and return the
        number which returned non-error response codes.
        """
        count = 0
        for msg in email_messages:
            response = self.send_message(msg)
            # response.ok is a bool; += counts successes.
            count += response.ok
        return count
| [
"schmitgreg@gmail.com"
] | schmitgreg@gmail.com |
641d4a77c65b117c530880486b96976994997758 | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/core/framework/variable_pb2.py | d039b2253deae01739d32df4de63b92afd561ed4 | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:44f055eb2279cd13afd13956996ab45ffe6357c2bc77bbe30d0d98318245f426
size 11678
| [
"business030301@gmail.com"
] | business030301@gmail.com |
177a00ba50bec956baa12e26723cf225d3cdd6fc | e6a73d0f2000932c520ee88c1681c4d2c9d84b9f | /app/spiders/ritz1.py | b8b1f95c784e601baf98774069b239ca57e66965 | [] | no_license | caseydm/hotels | d6763d36f82d843215583f78519b427cca715f1b | 9138b94e8769e087c8248ceff07c61d1ea1c243f | refs/heads/master | 2021-01-21T09:56:13.166453 | 2017-02-28T00:07:50 | 2017-02-28T00:07:50 | 83,364,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | from app.spiders.marriott_scrape import scrape_marriott
from app.spiders.constants import RITZ1
# Entry-point script: scrape the Marriott site for the RITZ1 property.
scrape_marriott(RITZ1)
| [
"caseym@gmail.com"
] | caseym@gmail.com |
d09592cff798e5f6b30194c839b285354a53054a | 28a124b6a2f22a53af3b6bb754e77af88b4138e1 | /DJANGO/DjangoRestFramework/intro/comment/api/views.py | ec8c91dabbf7cefb14cd4267f581b6dfe6440a51 | [] | no_license | mebaysan/LearningKitforBeginners-Python | f7c6668a9978b52cad6cc2b969990d7bbfedc376 | 9e1a47fb14b3d81c5b009b74432902090e213085 | refs/heads/master | 2022-12-21T03:12:19.892857 | 2021-06-22T11:58:27 | 2021-06-22T11:58:27 | 173,840,726 | 18 | 4 | null | 2022-12-10T03:00:22 | 2019-03-04T23:56:27 | Python | UTF-8 | Python | false | false | 1,924 | py | from rest_framework.generics import CreateAPIView, ListAPIView, DestroyAPIView, RetrieveUpdateAPIView
from comment.models import Comment
from comment.api.serializers import CommentCreateSerializer, CommentListSerializer, CommentDeleteUpdateSerializer
from comment.api.permissions import IsOwner
from rest_framework.permissions import IsAuthenticated
from comment.api.paginators import CommentPaginator
from rest_framework.mixins import UpdateModelMixin, DestroyModelMixin
class CommentCreateAPIView(CreateAPIView):
    """Create a comment; the author is taken from the authenticated request."""
    queryset = Comment.objects.all()
    serializer_class = CommentCreateSerializer

    def perform_create(self, serializer):
        # Stamp the logged-in user as the comment's owner.
        serializer.save(user=self.request.user)
class CommentListAPIView(ListAPIView):
    """List top-level comments, optionally filtered by post via ?q=<post_pk>."""
    # queryset = Comment.objects.all()
    serializer_class = CommentListSerializer
    pagination_class = CommentPaginator

    def get_queryset(self):
        # Default: only root comments (replies have a non-null parent).
        queryset = Comment.objects.filter(parent=None)
        query = self.request.GET.get('q')
        if query:
            # NOTE(review): the post filter does not re-apply parent=None,
            # so replies are included here - confirm this is intended.
            queryset = Comment.objects.filter(post=query)
        return queryset
# class CommentDeleteAPIView(DestroyAPIView, UpdateModelMixin):
class CommentDeleteAPIView(DestroyAPIView):
    """Delete a comment by pk; only its authenticated owner may do so."""
    queryset = Comment.objects.all()
    serializer_class = CommentDeleteUpdateSerializer
    lookup_field = 'pk'
    permission_classes = [IsAuthenticated, IsOwner]

    # def put(self, request, *args, **kwargs):  # if a PUT request reaches this view
    #     return self.update(request, *args, **kwargs)
class CommentUpdateAPIView(RetrieveUpdateAPIView, DestroyModelMixin):
    """Retrieve/update a comment, and support DELETE on the same route."""
    queryset = Comment.objects.all()
    serializer_class = CommentDeleteUpdateSerializer
    lookup_field = 'pk'
    permission_classes = [IsAuthenticated, IsOwner]

    def delete(self, request, *args, **kwargs):
        # BUG FIX: the destroy() Response must be returned; the original
        # discarded it, so DELETE produced no HTTP response object.
        return self.destroy(request, *args, **kwargs)
| [
"menesbaysan@gmail.com"
] | menesbaysan@gmail.com |
eb9f93c41f17e6c67016513077e632e91c2443b2 | 8ee5dcbdbd407eb5f294d430813b16eca22f571c | /data/HW5/hw5_308.py | 2c3b780ef113894c2152aaba21d6a3219530477f | [] | no_license | MAPLE-Robot-Subgoaling/IPT | 5e60e2ee4d0a5688bc8711ceed953e76cd2ad5d9 | f512ea3324bfdceff8df63b4c7134b5fcbb0514e | refs/heads/master | 2021-01-11T12:31:00.939051 | 2018-08-13T23:24:19 | 2018-08-13T23:24:19 | 79,373,489 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 622 | py | def main():
width = int(input("Please enter the width of the box: "))
height = int(input("Please enter the height of the box: "))
outline = str(input("Please enter a symbol for the box outline: "))
fill = str(input("Please enter a symbol for the box fill: "))
for i in range(height):
line = ""
for k in range(width):
if i == 0 or i == height-1:
line = line + outline
else:
if k == 0 or k == width-1:
line = line + outline
else:
line = line + fill
print(line)
main()
| [
"mneary1@umbc.edu"
] | mneary1@umbc.edu |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.