blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2db9a31d23446db98d3f0babc65ad406e63c0b02 | 498474967e1480acf5cc0f25756e1d748c677195 | /mmdetection3d/mmdet3d/ops/furthest_point_sample/points_sampler.py | 469037fb9ca004d1f2baa136f5d3484cc941ac49 | [
"MIT",
"Apache-2.0"
] | permissive | hustvl/MapTR | adc37f78cbae9d8c909dd8648088a4930bf55377 | feb0664e64684d3207859279f047fa54a1a806f6 | refs/heads/main | 2023-08-25T17:44:47.672149 | 2023-08-14T13:31:17 | 2023-08-14T13:31:17 | 518,672,305 | 643 | 95 | MIT | 2023-09-14T03:30:23 | 2022-07-28T02:20:43 | Python | UTF-8 | Python | false | false | 5,260 | py | import torch
from mmcv.runner import force_fp32
from torch import nn as nn
from typing import List
from .furthest_point_sample import furthest_point_sample, furthest_point_sample_with_dist
from .utils import calc_square_dist
def get_sampler_type(sampler_type):
    """Get the points sampler class for a sampler type string.

    Args:
        sampler_type (str): The type of points sampler.
            The valid value are "D-FPS", "F-FPS", or "FS".

    Returns:
        class: Points sampler type.
    """
    # Dispatch table: sampler type string -> sampler class.
    sampler_classes = {
        "D-FPS": DFPS_Sampler,
        "F-FPS": FFPS_Sampler,
        "FS": FS_Sampler,
    }
    if sampler_type not in sampler_classes:
        raise ValueError(
            'Only "sampler_type" of "D-FPS", "F-FPS", or "FS"' f" are supported, got {sampler_type}"
        )
    return sampler_classes[sampler_type]
class Points_Sampler(nn.Module):
    """Points sampling.

    Applies one FPS variant per entry of ``fps_mod_list`` to successive
    index ranges of the input point cloud and concatenates the sampled
    indices along dim 1.

    Args:
        num_point (list[int]): Number of sample points.
        fps_mod_list (list[str]): Type of FPS method, valid mod
            ['F-FPS', 'D-FPS', 'FS'], Default: ['D-FPS'].
            F-FPS: using feature distances for FPS.
            D-FPS: using Euclidean distances of points for FPS.
            FS: using F-FPS and D-FPS simultaneously.
        fps_sample_range_list (list[int]): Range of points to apply FPS.
            Default: [-1].
    """

    def __init__(
        self,
        num_point: List[int],
        fps_mod_list: List[str] = ["D-FPS"],
        fps_sample_range_list: List[int] = [-1],
    ):
        super(Points_Sampler, self).__init__()
        # FPS would be applied to different fps_mod in the list,
        # so the length of the num_point should be equal to
        # fps_mod_list and fps_sample_range_list.
        assert len(num_point) == len(fps_mod_list) == len(fps_sample_range_list)
        self.num_point = num_point
        self.fps_sample_range_list = fps_sample_range_list
        self.samplers = nn.ModuleList()
        # Instantiate one sampler module per requested FPS mode.
        for fps_mod in fps_mod_list:
            self.samplers.append(get_sampler_type(fps_mod)())
        self.fp16_enabled = False

    @force_fp32()
    def forward(self, points_xyz, features):
        """forward.

        Args:
            points_xyz (Tensor): (B, N, 3) xyz coordinates of the features.
            features (Tensor): (B, C, N) Descriptors of the features.

        Return:
            Tensor: (B, npoint, sample_num) Indices of sampled points.
        """
        indices = []
        last_fps_end_index = 0
        for fps_sample_range, sampler, npoint in zip(
            self.fps_sample_range_list, self.samplers, self.num_point
        ):
            assert fps_sample_range < points_xyz.shape[1]
            if fps_sample_range == -1:
                # -1 means: sample from all points after the previous range.
                sample_points_xyz = points_xyz[:, last_fps_end_index:]
                sample_features = (
                    features[:, :, last_fps_end_index:] if features is not None else None
                )
            else:
                sample_points_xyz = points_xyz[:, last_fps_end_index:fps_sample_range]
                sample_features = (
                    features[:, :, last_fps_end_index:fps_sample_range]
                    if features is not None
                    else None
                )
            fps_idx = sampler(sample_points_xyz.contiguous(), sample_features, npoint)
            # Shift local indices back into the full point cloud's index space.
            indices.append(fps_idx + last_fps_end_index)
            # NOTE(review): when fps_sample_range is -1 this decrements the
            # offset; presumably -1 only ever appears as the last list entry --
            # confirm against callers.
            last_fps_end_index += fps_sample_range
        indices = torch.cat(indices, dim=1)
        return indices
class DFPS_Sampler(nn.Module):
    """D-FPS sampling: furthest point sampling on Euclidean xyz distances."""

    def __init__(self):
        super(DFPS_Sampler, self).__init__()

    def forward(self, points, features, npoint):
        """Sample npoint indices with D-FPS; the features argument is unused."""
        return furthest_point_sample(points.contiguous(), npoint)
class FFPS_Sampler(nn.Module):
    """F-FPS sampling: furthest point sampling on feature-space distances."""

    def __init__(self):
        super(FFPS_Sampler, self).__init__()

    def forward(self, points, features, npoint):
        """Sample npoint indices with F-FPS; features must be provided."""
        assert features is not None, "feature input to FFPS_Sampler should not be None"
        # Fuse xyz with per-point descriptors: (B, N, 3 + C).
        fused = torch.cat([points, features.transpose(1, 2)], dim=2)
        # Pairwise squared distances in the fused space drive the sampling.
        pairwise_dist = calc_square_dist(fused, fused, norm=False)
        return furthest_point_sample_with_dist(pairwise_dist, npoint)
class FS_Sampler(nn.Module):
    """FS sampling: run F-FPS and D-FPS and concatenate both index sets."""

    def __init__(self):
        super(FS_Sampler, self).__init__()

    def forward(self, points, features, npoint):
        """Sample 2 * npoint indices: npoint via F-FPS, npoint via D-FPS."""
        assert features is not None, "feature input to FS_Sampler should not be None"
        fused = torch.cat([points, features.transpose(1, 2)], dim=2)
        pairwise_dist = calc_square_dist(fused, fused, norm=False)
        idx_feature = furthest_point_sample_with_dist(pairwise_dist, npoint)
        idx_euclidean = furthest_point_sample(points, npoint)
        # Feature-space picks first, then Euclidean picks: (B, 2 * npoint).
        return torch.cat([idx_feature, idx_euclidean], dim=1)
| [
"cnliao62@gmail.com"
] | cnliao62@gmail.com |
7b0fbc9f015cf2412785d50bd90a065d9d4a77f2 | c032a4c0f3f8bce2e9271df49a3113509ac80be8 | /code/python/echomesh/base/Quit.py | 94de10817f214ca37f9c3b2caaeabcbb69bf461d | [
"MIT"
] | permissive | neteler/echomesh | e36fad86f84188014515ab4d67790f776b149280 | 23fdc02f852d7f2fd8e118da2e75be9612ada8c7 | refs/heads/master | 2020-12-03T09:12:09.665447 | 2014-07-05T18:39:24 | 2014-07-05T18:39:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | from __future__ import absolute_import, division, print_function, unicode_literals
import atexit
# True once request_quit() has been called.
QUITTING = False
# Set of zero-argument callables to invoke on shutdown.
HANDLERS = set()
# When True, exceptions raised by quit handlers are printed instead of hidden.
PRINT_EXCEPTIONS = not False
# True once the atexit hook has been installed.
_REGISTERED = False
def register_atexit(handler):
  """Add handler to the quit-handler set, installing the atexit hook once."""
  HANDLERS.add(handler)
  if _REGISTERED:
    return
  _register_quit()
def unregister_atexit(handler):
  """Remove a previously registered quit handler; ignore unknown handlers.

  Fix: HANDLERS is a set, so removing a missing element raises KeyError, not
  ValueError -- the old ``except ValueError`` guard never matched and the
  intended "ignore missing" behavior was broken.  set.discard() is the
  exception-free removal.
  """
  HANDLERS.discard(handler)
def request_quit():
  """Set the QUITTING flag and invoke every registered quit handler."""
  global QUITTING
  QUITTING = True
  for handler in HANDLERS:
    try:
      handler()
    except Exception as error:
      # Handlers must not abort shutdown; optionally report and continue.
      if PRINT_EXCEPTIONS:
        print('Exception during quit:', error)
def _register_quit():
  """Install the atexit hook that runs the quit handlers on interpreter exit."""
  global _REGISTERED
  _REGISTERED = True
  def atexit_quit():
    # Distinguish a user-requested shutdown from one forced by a fatal error.
    if QUITTING:
      reason = 'at your request'
    else:
      reason = 'due to a fatal error'
    request_quit()
    print('echomesh shut down %s.' % reason)
  atexit.register(atexit_quit)
| [
"tom@swirly.com"
] | tom@swirly.com |
f186bad3e46a2bb84657547fa0215f79598e566a | ecad7a72a981f2c1f4a0819cc4770a9b6be0e1d4 | /generate_letter.py | 7075d8ceddff8373d1448de790f5c8e02b65488c | [
"MIT"
] | permissive | drewlinsley/cabc | ecc06ba6f94727485e92327a8e395f529b948d23 | 74726509e542d5f0f04bf297c211cca9f6e87b56 | refs/heads/master | 2022-04-17T23:40:21.331822 | 2020-04-18T19:11:34 | 2020-04-18T19:11:34 | 256,832,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,373 | py | import PIL
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
import numpy as np
import matplotlib.pyplot as plt
import os
from skimage.filters import threshold_otsu
import scipy
from scipy import ndimage
from scipy.interpolate import griddata
import cv2
import preprocess
if __name__ == "__main__":
# DEFINE AND LOAD FONT
script_root = '/Users/junkyungkim/Documents/PycharmProjects/cluttered_nist'
fontnames = ['FUTRFW.ttf',
'Instruction.otf',
'absender1.ttf',
'5Identification-Mono.ttf',
'7Segment.ttf',
'VCR_OSD_MONO_1.001.ttf',
'Instruction.otf',
'Segment16B Regular.ttf']
std_fontsizes = [225, 240, 225, 150, 255, 255, 255, 255]
std_thin_iters = [6, 15, 4, 9, 9, 2]
scale = 1 # 0.5
for fontname, std_fontsize, std_thin_iter in zip(fontnames, std_fontsizes, std_thin_iters):
std_fontsize = int(std_fontsize*scale)
std_thin_iter = int(std_thin_iter*scale)
font = ImageFont.truetype(os.path.join(script_root,'fonts',fontname), std_fontsize)
# RENDER
img=Image.new("RGBA", (2500, 300), (255, 255, 255))
draw = ImageDraw.Draw(img)
draw.text((0, 0), "ABCDEFGXYZ", (0, 0, 0), font=font)
draw = ImageDraw.Draw(img)
# MORPHOLOGICAL POSTPROC (FOR CONSTNAT STROKE THICKNESS)
img = 255 - np.mean(np.array(img), axis=2)
binary = img > 128
# img_closed = scipy.ndimage.binary_closing(binary.astype(np.int), iterations=20)##np.maximum(iterations / 2, 1))
img_eroded = (scipy.ndimage.morphology.binary_erosion(binary, iterations=std_thin_iter) * 255).astype(np.uint8)
landscape = preprocess.generate_distortion_mask(img_eroded, sigma=[4000,2000], num_centers=[30,20])
warped = preprocess.custom_warp(img_eroded, landscape, power=0.07)
# img_dist = img_eroded
# distCoeffs = [-.1, 1.0, 1.0, 1.0]
# focal_length = [1000, 1000]
# for coord in [[400,100],[500,150],[600,200]]:
# distCoeffs[0] = distCoeffs[0]*-1
# img_dist = custom_fisheye(img_dist, coord, distCoeffs, focal_length)
# import preprocess
# im_pixelated = preprocess.pixelate_obj(img_eroded, [10 * scale, 10 * scale], 0.1, 5 * scale, ignore_fit=True)
plt.subplot(211);plt.imshow(binary, cmap='gray')
plt.subplot(212);plt.imshow(warped, cmap='gray')
plt.show()
# thinned = zhangSuen(binary)
# plt.subplot(121)
# plt.imshow(img)
# plt.subplot(122)
# plt.imshow(thinned)
# plt.show() | [
"drewlinsley@gmail.com"
] | drewlinsley@gmail.com |
17dcde27a40003f391afec81b727a34d7431e102 | bcc199a7e71b97af6fbfd916d5a0e537369c04d9 | /leetcode/solved/1386_Shift_2D_Grid/solution.py | 1c0ab2bfa3017fd43451588edc1f42f31d0e64dc | [] | no_license | sungminoh/algorithms | 9c647e82472905a2c4e505c810b622b734d9d20d | 1389a009a02e90e8700a7a00e0b7f797c129cdf4 | refs/heads/master | 2023-05-01T23:12:53.372060 | 2023-04-24T06:34:12 | 2023-04-24T06:34:12 | 87,406,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,482 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2020 sungminoh <smoh2044@gmail.com>
#
# Distributed under terms of the MIT license.
"""
Given a 2D grid of size m x n and an integer k. You need to shift the grid k times.
In one shift operation:
Element at grid[i][j] moves to grid[i][j + 1].
Element at grid[i][n - 1] moves to grid[i + 1][0].
Element at grid[m - 1][n - 1] moves to grid[0][0].
Return the 2D grid after applying shift operation k times.
Example 1:
Input: grid = [[1,2,3],[4,5,6],[7,8,9]], k = 1
Output: [[9,1,2],[3,4,5],[6,7,8]]
Example 2:
Input: grid = [[3,8,1,9],[19,7,2,5],[4,6,11,10],[12,0,21,13]], k = 4
Output: [[12,0,21,13],[3,8,1,9],[19,7,2,5],[4,6,11,10]]
Example 3:
Input: grid = [[1,2,3],[4,5,6],[7,8,9]], k = 9
Output: [[1,2,3],[4,5,6],[7,8,9]]
Constraints:
m == grid.length
n == grid[i].length
1 <= m <= 50
1 <= n <= 50
-1000 <= grid[i][j] <= 1000
0 <= k <= 100
"""
import sys
from typing import List
import pytest
class Solution:
    def shiftGrid(self, grid: List[List[int]], k: int) -> List[List[int]]:
        """Shift the m x n grid k times and return it.

        The grid is mutated in place (and the same object returned): a shift
        moves every element one position forward in row-major order, with the
        last element wrapping around to grid[0][0].
        """
        if not grid or not grid[0] or k == 0:
            return grid
        m, n = len(grid), len(grid[0])
        total = m * n
        # Shifting by a multiple of total is the identity.
        shift = k % total
        # Flatten in row-major order, rotate right by `shift`, write back.
        flat = [value for row in grid for value in row]
        rotated = flat[total - shift:] + flat[:total - shift]
        for index, value in enumerate(rotated):
            grid[index // n][index % n] = value
        return grid
@pytest.mark.parametrize('grid, k, expected', [
    ([[1,2,3],[4,5,6],[7,8,9]], 1, [[9,1,2],[3,4,5],[6,7,8]]),
    ([[3,8,1,9],[19,7,2,5],[4,6,11,10],[12,0,21,13]], 4, [[12,0,21,13],[3,8,1,9],[19,7,2,5],[4,6,11,10]]),
    ([[1,2,3],[4,5,6],[7,8,9]], 9, [[1,2,3],[4,5,6],[7,8,9]]),
])
def test(grid, k, expected):
    """Check shiftGrid against the three LeetCode examples."""
    assert expected == Solution().shiftGrid(grid, k)
if __name__ == '__main__':
    # Run this file's parametrized tests directly through pytest.
    sys.exit(pytest.main(["-s", "-v"] + sys.argv))
| [
"smoh2044@gmail.com"
] | smoh2044@gmail.com |
836ee8709c8b048ff79134e0e08213d8f01fd856 | 635c9f0501039f5a099849a3108e19de76092aea | /ssafy_project/project_190322/project_8/movies/urls.py | 1136c2901dad4d2cbab7e8155d3ad5706fba69fe | [] | no_license | Hansung-Lee/SSAFY | 87ebea0808bb40381678d678e1035dc5fa2c2eb0 | cdb7ae1bba0e98e733eed703da2c62217c319462 | refs/heads/master | 2020-04-14T20:03:05.975040 | 2019-05-16T08:57:21 | 2019-05-16T08:57:21 | 164,080,393 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | from django.urls import path
from . import views
# URL namespace used when reversing, e.g. reverse('movies:detail').
app_name = 'movies'
urlpatterns = [
    path('', views.index, name="index"),
    path('<int:movie_id>/', views.detail, name="detail"),
    path('<int:movie_id>/delete/', views.delete, name="delete"),
    path('<int:movie_id>/scores/new/', views.create_score, name="create_score"),
    path('<int:movie_id>/scores/<int:score_id>/delete/', views.delete_score, name="delete_score"),
    path('<int:movie_id>/edit/', views.edit, name="edit"),
    # 'new/' cannot collide with '<int:movie_id>/' -- the int converter
    # rejects non-numeric segments -- so its position here is safe.
    path('new/', views.new, name="new"),
]
| [
"ajtwlsgkst@naver.com"
] | ajtwlsgkst@naver.com |
896945d867a0323b7a1fb35358ffa30a466dee9a | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_229/ch74_2020_04_13_02_53_22_051219.py | 5eaf44ad0d6ec022abd09d63fa6ae9c97885c49d | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | def conta_bigramas(string):
dic = dict()
i = 0
while i <= len(string):
if '{0}{1}'.format(string[i], string[i+1]) in dic:
dic['{0}{1}'.format(string[i], string[i+1])] += 1
i += 1
else:
dic['{0}{1}'.format(string[i], string[i+1])] = 1
i += 1
return dic | [
"you@example.com"
] | you@example.com |
ed8923af89964cb325794d4ffef156318ce5703a | c36d9d70cbb257b2ce9a214bcf38f8091e8fe9b7 | /623_add_one_row_to_tree.py | 770582da734c9a59f464bf65486460b2ac4d96b9 | [] | no_license | zdadadaz/coding_practice | 3452e4fc8f4a79cb98d0d4ea06ce0bcae85f96a0 | 5ed070f22f4bc29777ee5cbb01bb9583726d8799 | refs/heads/master | 2021-06-23T17:52:40.149982 | 2021-05-03T22:31:23 | 2021-05-03T22:31:23 | 226,006,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def addOneRow(self, root: TreeNode, v: int, d: int) -> TreeNode:
        """Insert a row of nodes with value v at depth d.

        Each node at depth d - 1 gets two new children holding v; its old
        left subtree hangs off the new left child, its old right subtree off
        the new right child.  d == 1 creates a new root with the whole tree
        as its left subtree.
        """
        if d == 1:
            return TreeNode(v, root, None)
        # Walk down level by level to collect the existing nodes at depth d - 1.
        level = [root] if root else []
        for _ in range(d - 2):
            level = [child for node in level
                     for child in (node.left, node.right) if child]
        # Splice the new row in below every node of that level.
        for node in level:
            node.left = TreeNode(v, node.left, None)
            node.right = TreeNode(v, None, node.right)
        return root
"zdadadaz5566@gmail.com"
] | zdadadaz5566@gmail.com |
1e8215053bd8f5b9014a0cb27339c62856eb3a95 | 5141a0ec55675aebf6b2a3f2454022e3fd8ad639 | /tractseg/experiments/pretrained_models/TractSeg_HR_3D_DAug.py | 24f5daef597317290e5b07a3d0f6571cd0134d58 | [
"Apache-2.0"
] | permissive | bbastardes/TractSeg | 303871586c682983b451885e069a75822a6c09db | 70714fca0e0dc241946ffc704f05b65095c7effb | refs/heads/master | 2020-05-27T03:02:27.806224 | 2019-05-16T14:50:12 | 2019-05-16T14:50:12 | 188,459,492 | 0 | 0 | null | 2019-05-24T17:04:07 | 2019-05-24T17:04:06 | null | UTF-8 | Python | false | false | 432 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from tractseg.experiments.tract_seg import Config as TractSegConfig
class Config(TractSegConfig):
    # Experiment name is this file's basename without the extension.
    EXP_NAME = os.path.basename(__file__).split(".")[0]
    MODEL = "UNet3D_Pytorch_DeepSup_sm"
    DIM = "3D"
    UPSAMPLE_TYPE = "trilinear"
    # Batch size 1: the 3D U-Net with data augmentation is memory hungry
    # (see the memory notes below).
    BATCH_SIZE = 1
    UNET_NR_FILT = 8
    """
    Memory consumption
    NR_FILT=8: 10900MB
    System RAM running full (>30GB) from DAug
    """
"j.wasserthal@dkfz.de"
] | j.wasserthal@dkfz.de |
e74c020259508ffc4c35a385a5eec856ccdec726 | a04eff13392361cf6effa7b321ff6c931705534c | /python/ccxt/btcmarkets.py | f53fffca535744f3a3062ac5cc87b13aa7ec0650 | [
"MIT"
] | permissive | Homiex/homiex-ccxt | 89594883f06f72e8eaf3222d43a66370a030dbd2 | f669d7cb2a9276ba07c7782c5ec1a488f13d930d | refs/heads/master | 2022-07-06T19:47:38.759274 | 2020-03-16T09:27:07 | 2020-03-16T09:27:07 | 246,796,828 | 3 | 4 | MIT | 2022-06-23T01:48:09 | 2020-03-12T09:41:33 | JavaScript | UTF-8 | Python | false | false | 26,791 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import base64
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
class btcmarkets(Exchange):
    def describe(self):
        """Return the static exchange description (endpoints, fees, features).

        Merged over the base Exchange.describe() via deep_extend.
        """
        return self.deep_extend(super(btcmarkets, self).describe(), {
            'id': 'btcmarkets',
            'name': 'BTC Markets',
            'countries': ['AU'],  # Australia
            'rateLimit': 1000,  # market data cached for 1 second(trades cached for 2 seconds)
            'has': {
                'CORS': False,
                'fetchOHLCV': True,
                'fetchOrder': True,
                'fetchOrders': True,
                'fetchClosedOrders': 'emulated',
                'fetchOpenOrders': True,
                'fetchMyTrades': True,
                'cancelOrders': True,
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/29142911-0e1acfc2-7d5c-11e7-98c4-07d9532b29d7.jpg',
                'api': {
                    'public': 'https://api.btcmarkets.net',
                    'private': 'https://api.btcmarkets.net',
                    'web': 'https://btcmarkets.net/data',
                },
                'www': 'https://btcmarkets.net',
                'doc': 'https://github.com/BTCMarkets/API',
            },
            'api': {
                'public': {
                    'get': [
                        'market/{id}/tick',
                        'market/{id}/orderbook',
                        'market/{id}/trades',
                        'v2/market/{id}/tickByTime/{timeframe}',
                        'v2/market/{id}/trades',
                        'v2/market/active',
                    ],
                },
                'private': {
                    'get': [
                        'account/balance',
                        'account/{id}/tradingfee',
                        'fundtransfer/history',
                        'v2/order/open',
                        'v2/order/open/{id}',
                        'v2/order/history/{instrument}/{currency}/',
                        'v2/order/trade/history/{id}',
                        'v2/transaction/history/{currency}',
                    ],
                    'post': [
                        'fundtransfer/withdrawCrypto',
                        'fundtransfer/withdrawEFT',
                        'order/create',
                        'order/cancel',
                        'order/history',
                        'order/open',
                        'order/trade/history',
                        'order/createBatch',  # they promise it's coming soon...
                        'order/detail',
                    ],
                },
                'web': {
                    'get': [
                        'market/BTCMarkets/{id}/tickByTime',
                    ],
                },
            },
            'timeframes': {
                '1m': 'minute',
                '1h': 'hour',
                '1d': 'day',
            },
            # Numeric API error codes mapped to ccxt exception classes.
            'exceptions': {
                '3': InvalidOrder,
                '6': DDoSProtection,
            },
            'fees': {
                'percentage': True,
                'tierBased': True,
                'maker': -0.05 / 100,
                'taker': 0.20 / 100,
            },
            # AUD-quoted pairs use a different fee schedule(see fetch_markets).
            'options': {
                'fees': {
                    'AUD': {
                        'maker': 0.85 / 100,
                        'taker': 0.85 / 100,
                    },
                },
            },
        })
def fetch_transactions(self, code=None, since=None, limit=None, params={}):
self.load_markets()
request = {}
if limit is not None:
request['limit'] = limit
if since is not None:
request['since'] = since
response = self.privateGetFundtransferHistory(self.extend(request, params))
transactions = response['fundTransfers']
return self.parse_transactions(transactions, None, since, limit)
def parse_transaction_status(self, status):
# todo: find more statuses
statuses = {
'Complete': 'ok',
}
return self.safe_string(statuses, status, status)
    def parse_transaction(self, item, currency=None):
        """Parse a raw fund transfer into the unified ccxt transaction format.

        The raw ``amount`` is a fixed-point integer scaled by 1e8 and is
        rescaled below.  NOTE(review): ``fee`` is NOT rescaled here even
        though the raw examples show it in the same fixed-point encoding --
        confirm against the API before relying on the fee value.
        """
        #
        # {
        #     status: 'Complete',
        #     fundTransferId: 1904311906,
        #     description: 'ETH withdraw from [me@email.com] to Address: 0xF123aa44FadEa913a7da99cc2eE202Db684Ce0e3 amount: 8.28965701 fee: 0.00000000',
        #     creationTime: 1529418358525,
        #     currency: 'ETH',
        #     amount: 828965701,
        #     fee: 0,
        #     transferType: 'WITHDRAW',
        #     errorMessage: null,
        #     lastUpdate: 1529418376754,
        #     cryptoPaymentDetail: {
        #         address: '0xF123aa44FadEa913a7da99cc2eE202Db684Ce0e3',
        #         txId: '0x8fe483b6f9523559b9ebffb29624f98e86227d2660d4a1fd4785d45e51c662c2'
        #     }
        # }
        #
        # {
        #     status: 'Complete',
        #     fundTransferId: 494077500,
        #     description: 'BITCOIN Deposit, B 0.1000',
        #     creationTime: 1501077601015,
        #     currency: 'BTC',
        #     amount: 10000000,
        #     fee: 0,
        #     transferType: 'DEPOSIT',
        #     errorMessage: null,
        #     lastUpdate: 1501077601133,
        #     cryptoPaymentDetail: null
        # }
        #
        # {
        #     "fee": 0,
        #     "amount": 56,
        #     "status": "Complete",
        #     "currency": "BCHABC",
        #     "lastUpdate": 1542339164044,
        #     "description": "BitcoinCashABC Deposit, P 0.00000056",
        #     "creationTime": 1542339164003,
        #     "errorMessage": null,
        #     "transferType": "DEPOSIT",
        #     "fundTransferId": 2527326972,
        #     "cryptoPaymentDetail": null
        # }
        #
        timestamp = self.safe_integer(item, 'creationTime')
        lastUpdate = self.safe_integer(item, 'lastUpdate')
        transferType = self.safe_string(item, 'transferType')
        # cryptoPaymentDetail may be null for fiat transfers(see examples).
        cryptoPaymentDetail = self.safe_value(item, 'cryptoPaymentDetail', {})
        address = self.safe_string(cryptoPaymentDetail, 'address')
        txid = self.safe_string(cryptoPaymentDetail, 'txId')
        type = None
        if transferType == 'DEPOSIT':
            type = 'deposit'
        elif transferType == 'WITHDRAW':
            type = 'withdrawal'
        else:
            # Unknown transfer types are passed through verbatim.
            type = transferType
        fee = self.safe_float(item, 'fee')
        status = self.parse_transaction_status(self.safe_string(item, 'status'))
        ccy = self.safe_string(item, 'currency')
        code = self.safe_currency_code(ccy)
        # todo: self logic is duplicated below
        amount = self.safe_float(item, 'amount')
        if amount is not None:
            amount = amount * 1e-8
        return {
            'id': self.safe_string(item, 'fundTransferId'),
            'txid': txid,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'address': address,
            'tag': None,
            'type': type,
            'amount': amount,
            'currency': code,
            'status': status,
            'updated': lastUpdate,
            'fee': {
                'currency': code,
                'cost': fee,
            },
            'info': item,
        }
    def fetch_markets(self, params={}):
        """Fetch the list of active markets via GET v2/market/active.

        Precision and limits are hard-coded per quote currency because the
        endpoint does not report them.
        """
        response = self.publicGetV2MarketActive(params)
        result = []
        markets = self.safe_value(response, 'markets')
        for i in range(0, len(markets)):
            market = markets[i]
            baseId = self.safe_string(market, 'instrument')
            quoteId = self.safe_string(market, 'currency')
            id = baseId + '/' + quoteId
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            symbol = base + '/' + quote
            # Per-quote fee overrides(options['fees']) fall back to defaults.
            fees = self.safe_value(self.safe_value(self.options, 'fees', {}), quote, self.fees)
            pricePrecision = 2
            amountPrecision = 4
            minAmount = 0.001  # where does it come from?
            minPrice = None
            if quote == 'AUD':
                # AUD pairs quote to 2 decimals except XRP/OMG(4 decimals).
                if (base == 'XRP') or (base == 'OMG'):
                    pricePrecision = 4
                amountPrecision = -math.log10(minAmount)
                minPrice = math.pow(10, -pricePrecision)
            precision = {
                'amount': amountPrecision,
                'price': pricePrecision,
            }
            limits = {
                'amount': {
                    'min': minAmount,
                    'max': None,
                },
                'price': {
                    'min': minPrice,
                    'max': None,
                },
                'cost': {
                    'min': None,
                    'max': None,
                },
            }
            result.append({
                'info': market,
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'baseId': baseId,
                'quoteId': quoteId,
                'active': None,
                'maker': fees['maker'],
                'taker': fees['taker'],
                'limits': limits,
                'precision': precision,
            })
        return result
    def fetch_balance(self, params={}):
        """Fetch account balances via GET account/balance.

        Raw balances are fixed-point integers scaled by 1e8.
        """
        self.load_markets()
        balances = self.privateGetAccountBalance(params)
        result = {'info': balances}
        for i in range(0, len(balances)):
            balance = balances[i]
            currencyId = self.safe_string(balance, 'currency')
            code = self.safe_currency_code(currencyId)
            multiplier = 100000000
            total = self.safe_float(balance, 'balance')
            if total is not None:
                total /= multiplier
            # pendingFunds are amounts locked in open orders/withdrawals.
            used = self.safe_float(balance, 'pendingFunds')
            if used is not None:
                used /= multiplier
            account = self.account()
            account['used'] = used
            account['total'] = total
            result[code] = account
        return self.parse_balance(result)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
#
# {
# "timestamp":1572307200000,
# "open":1962218,
# "high":1974850,
# "low":1962208,
# "close":1974850,
# "volume":305211315,
# }
#
multiplier = 100000000 # for price and volume
keys = ['open', 'high', 'low', 'close', 'volume']
result = [
self.safe_integer(ohlcv, 'timestamp'),
]
for i in range(0, len(keys)):
key = keys[i]
value = self.safe_float(ohlcv, key)
if value is not None:
value = value / multiplier
result.append(value)
return result
    def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Fetch candles via GET v2/market/{id}/tickByTime/{timeframe}."""
        self.load_markets()
        market = self.market(symbol)
        request = {
            'id': market['id'],
            'timeframe': self.timeframes[timeframe],
            # set to True to see candles more recent than the timestamp in the
            # since parameter, if a since parameter is used, default is False
            'indexForward': True,
            # set to True to see the earliest candles first in the list of
            # returned candles in chronological order, default is False
            'sortForward': True,
        }
        if since is not None:
            request['since'] = since
        if limit is not None:
            request['limit'] = limit  # default is 3000
        response = self.publicGetV2MarketIdTickByTimeTimeframe(self.extend(request, params))
        #
        # {
        #     "success":true,
        #     "paging":{
        #         "newer":"/v2/market/ETH/BTC/tickByTime/day?indexForward=true&since=1572307200000",
        #         "older":"/v2/market/ETH/BTC/tickByTime/day?since=1457827200000"
        #     },
        #     "ticks":[
        #         {"timestamp":1572307200000,"open":1962218,"high":1974850,"low":1962208,"close":1974850,"volume":305211315},
        #         {"timestamp":1572220800000,"open":1924700,"high":1951276,"low":1909328,"close":1951276,"volume":1086067595},
        #         {"timestamp":1572134400000,"open":1962155,"high":1962734,"low":1900905,"close":1930243,"volume":790141098},
        #     ],
        # }
        #
        ticks = self.safe_value(response, 'ticks', [])
        return self.parse_ohlcvs(ticks, market, timeframe, since, limit)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'],
}
response = self.publicGetMarketIdOrderbook(self.extend(request, params))
timestamp = self.safe_timestamp(response, 'timestamp')
return self.parse_order_book(response, timestamp)
    def parse_ticker(self, ticker, market=None):
        """Parse a raw ticker into the unified ccxt ticker structure.

        Only bid/ask/last/volume are provided by the endpoint; the other
        unified fields are filled with None.
        """
        timestamp = self.safe_timestamp(ticker, 'timestamp')
        symbol = None
        if market is not None:
            symbol = market['symbol']
        last = self.safe_float(ticker, 'lastPrice')
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': None,
            'low': None,
            'bid': self.safe_float(ticker, 'bestBid'),
            'bidVolume': None,
            'ask': self.safe_float(ticker, 'bestAsk'),
            'askVolume': None,
            'vwap': None,
            'open': None,
            'close': last,
            'last': last,
            'previousClose': None,
            'change': None,
            'percentage': None,
            'average': None,
            'baseVolume': self.safe_float(ticker, 'volume24h'),
            'quoteVolume': None,
            'info': ticker,
        }
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'],
}
response = self.publicGetMarketIdTick(self.extend(request, params))
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
timestamp = self.safe_timestamp(trade, 'timestamp')
symbol = None
if market is not None:
symbol = market['symbol']
id = self.safe_string(trade, 'tid')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'amount')
cost = None
if amount is not None:
if price is not None:
cost = amount * price
return {
'info': trade,
'id': id,
'order': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': None,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
# 'since': 59868345231,
'id': market['id'],
}
response = self.publicGetMarketIdTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
    def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Place an order via POST order/create.

        Price and volume are sent as fixed-point integers scaled by 1e8.
        NOTE(review): market orders may be called with price=None, which
        would raise in int(price * multiplier) -- confirm callers always
        supply a price here.
        """
        self.load_markets()
        market = self.market(symbol)
        multiplier = 100000000  # for price and volume
        orderSide = 'Bid' if (side == 'buy') else 'Ask'
        # self.ordered() preserves insertion order; presumably the signed
        # request body must list the fields in exactly self order -- confirm.
        request = self.ordered({
            'currency': market['quote'],
        })
        request['currency'] = market['quote']
        request['instrument'] = market['base']
        request['price'] = int(price * multiplier)
        request['volume'] = int(amount * multiplier)
        request['orderSide'] = orderSide
        request['ordertype'] = self.capitalize(type)
        request['clientRequestId'] = str(self.nonce())
        response = self.privatePostOrderCreate(self.extend(request, params))
        id = self.safe_string(response, 'id')
        return {
            'info': response,
            'id': id,
        }
def cancel_orders(self, ids, symbol=None, params={}):
self.load_markets()
for i in range(0, len(ids)):
ids[i] = int(ids[i])
request = {
'orderIds': ids,
}
return self.privatePostOrderCancel(self.extend(request, params))
    def cancel_order(self, id, symbol=None, params={}):
        """Cancel a single order by delegating to cancel_orders."""
        self.load_markets()
        return self.cancel_orders([id])
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
rate = market[takerOrMaker]
currency = None
cost = None
if market['quote'] == 'AUD':
currency = market['quote']
cost = float(self.cost_to_precision(symbol, amount * price))
else:
currency = market['base']
cost = float(self.amount_to_precision(symbol, amount))
return {
'type': takerOrMaker,
'currency': currency,
'rate': rate,
'cost': float(self.fee_to_precision(symbol, rate * cost)),
}
def parse_my_trade(self, trade, market):
multiplier = 100000000
timestamp = self.safe_integer(trade, 'creationTime')
side = self.safe_float(trade, 'side')
side = 'buy' if (side == 'Bid') else 'sell'
# BTCMarkets always charge in AUD for AUD-related transactions.
feeCurrencyCode = None
symbol = None
if market is not None:
feeCurrencyCode = market['quote'] if (market['quote'] == 'AUD') else market['base']
symbol = market['symbol']
id = self.safe_string(trade, 'id')
price = self.safe_float(trade, 'price')
if price is not None:
price /= multiplier
amount = self.safe_float(trade, 'volume')
if amount is not None:
amount /= multiplier
feeCost = self.safe_float(trade, 'fee')
if feeCost is not None:
feeCost /= multiplier
cost = None
if price is not None:
if amount is not None:
cost = price * amount
orderId = self.safe_string(trade, 'orderId')
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'order': orderId,
'symbol': symbol,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': {
'currency': feeCurrencyCode,
'cost': feeCost,
},
}
def parse_my_trades(self, trades, market=None, since=None, limit=None):
result = []
for i in range(0, len(trades)):
trade = self.parse_my_trade(trades[i], market)
result.append(trade)
return result
    def parse_order(self, order, market=None):
        """Parse a raw private order(including its fills) into unified form.

        Raw price/volume/openVolume are fixed-point integers scaled by 1e8.
        When the order has fills, cost/average/lastTradeTimestamp are
        recomputed from the parsed trades.
        """
        multiplier = 100000000
        side = 'buy' if (order['orderSide'] == 'Bid') else 'sell'
        type = 'limit' if (order['ordertype'] == 'Limit') else 'market'
        timestamp = self.safe_integer(order, 'creationTime')
        if market is None:
            market = self.market(order['instrument'] + '/' + order['currency'])
        status = 'open'
        if order['status'] == 'Failed' or order['status'] == 'Cancelled' or order['status'] == 'Partially Cancelled' or order['status'] == 'Error':
            status = 'canceled'
        elif order['status'] == 'Fully Matched' or order['status'] == 'Partially Matched':
            status = 'closed'
        price = self.safe_float(order, 'price') / multiplier
        amount = self.safe_float(order, 'volume') / multiplier
        remaining = self.safe_float(order, 'openVolume', 0.0) / multiplier
        filled = amount - remaining
        trades = self.parse_my_trades(order['trades'], market)
        numTrades = len(trades)
        # Default cost assumes the limit price; refined from fills below.
        cost = filled * price
        average = None
        lastTradeTimestamp = None
        if numTrades > 0:
            cost = 0
            for i in range(0, numTrades):
                trade = trades[i]
                cost = self.sum(cost, trade['cost'])
            if filled > 0:
                average = cost / filled
            lastTradeTimestamp = trades[numTrades - 1]['timestamp']
        id = self.safe_string(order, 'id')
        return {
            'info': order,
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': lastTradeTimestamp,
            'symbol': market['symbol'],
            'type': type,
            'side': side,
            'price': price,
            'cost': cost,
            'amount': amount,
            'filled': filled,
            'remaining': remaining,
            'average': average,
            'status': status,
            'trades': trades,
            'fee': None,
        }
def fetch_order(self, id, symbol=None, params={}):
    """Fetch one order by id via the privatePostOrderDetail endpoint.

    :raises OrderNotFound: when the exchange returns no matching order
    """
    self.load_markets()
    request = {
        'orderIds': [int(id)],
    }
    response = self.privatePostOrderDetail(self.extend(request, params))
    orders = response['orders']
    if not orders:
        raise OrderNotFound(self.id + ' No matching order found: ' + id)
    return self.parse_order(orders[0])
def create_paginated_request(self, market, since=None, limit=None):
    """Build the ordered request payload shared by the paginated
    private endpoints (order history, open orders, trade history)."""
    if limit is None:
        limit = 100
    if since is None:
        since = 0
    return self.ordered({
        'currency': market['quoteId'],
        'instrument': market['baseId'],
        'limit': limit,
        'since': since,
    })
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
    """Fetch the order history for one market; a symbol is mandatory."""
    if symbol is None:
        raise ArgumentsRequired(self.id + ': fetchOrders requires a `symbol` argument.')
    self.load_markets()
    market = self.market(symbol)
    payload = self.extend(self.create_paginated_request(market, since, limit), params)
    response = self.privatePostOrderHistory(payload)
    return self.parse_orders(response['orders'], market)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
    """Fetch currently open orders for one market; a symbol is mandatory."""
    if symbol is None:
        raise ArgumentsRequired(self.id + ': fetchOpenOrders requires a `symbol` argument.')
    self.load_markets()
    market = self.market(symbol)
    payload = self.extend(self.create_paginated_request(market, since, limit), params)
    response = self.privatePostOrderOpen(payload)
    return self.parse_orders(response['orders'], market)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
    """Fetch the order history and keep only orders whose unified
    status is 'closed'."""
    allOrders = self.fetch_orders(symbol, since, limit, params)
    return self.filter_by(allOrders, 'status', 'closed')
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
    """Fetch the private trade history for one market; a symbol is mandatory."""
    if symbol is None:
        raise ArgumentsRequired(self.id + ': fetchMyTrades requires a `symbol` argument.')
    self.load_markets()
    market = self.market(symbol)
    payload = self.extend(self.create_paginated_request(market, since, limit), params)
    response = self.privatePostOrderTradeHistory(payload)
    return self.parse_my_trades(response['trades'], market)
def nonce(self):
    """Return the current time in milliseconds, used as the request nonce."""
    return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
    """Build the request descriptor (url/method/body/headers) for an API call.

    Private requests carry an HMAC-SHA512 signature (base64-encoded)
    computed over a newline-separated string of the request uri, the
    nonce, and either the JSON body (POST) or the sorted query string
    (other methods).  The exact layout of the auth string is part of the
    exchange's protocol — do not reorder these concatenations.
    """
    uri = '/' + self.implode_params(path, params)
    url = self.urls['api'][api] + uri
    if api == 'private':
        self.check_required_credentials()
        nonce = str(self.nonce())
        auth = None
        headers = {
            'apikey': self.apiKey,
            'timestamp': nonce,
        }
        if method == 'POST':
            headers['Content-Type'] = 'application/json'
            auth = uri + "\n" + nonce + "\n"  # eslint-disable-line quotes
            body = self.json(params)
            auth += body
        else:
            # non-POST: sign uri + urlencoded sorted query (path params
            # removed) + nonce; the query string is omitted when empty
            query = self.keysort(self.omit(params, self.extract_params(path)))
            queryString = ''
            if query:
                queryString = self.urlencode(query)
                url += '?' + queryString
                queryString += "\n"  # eslint-disable-line quotes
            auth = uri + "\n" + queryString + nonce + "\n"  # eslint-disable-line quotes
        # the API secret is base64-encoded; decode before use as HMAC key
        secret = base64.b64decode(self.secret)
        signature = self.hmac(self.encode(auth), secret, hashlib.sha512, 'base64')
        headers['signature'] = self.decode(signature)
    else:
        if params:
            url += '?' + self.urlencode(params)
    return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
    """Map a failed API response onto a typed ccxt exception.

    Responses with no 'success' flag, or with success=True, are left to
    the default handler / normal processing.
    """
    if response is None:
        return  # fallback to default error handler
    if 'success' not in response:
        return
    if response['success']:
        return
    error = self.safe_string(response, 'errorCode')
    message = self.id + ' ' + self.json(response)
    if error in self.exceptions:
        raise self.exceptions[error](message)
    raise ExchangeError(message)
| [
"ruiliang.guo@homiex.com"
] | ruiliang.guo@homiex.com |
2a1753961f822c219ce11fa0cb7119790bcce617 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/Nortel-Magellan-Passport-ServerAccessRsaMIB.py | 6edf88406f4f8b8004ed3dbab32eaf9d06ad2170 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 54,228 | py | #
# PySNMP MIB module Nortel-Magellan-Passport-ServerAccessRsaMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-Magellan-Passport-ServerAccessRsaMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:18:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
RowPointer, DisplayString, Unsigned32, Counter32, StorageType, RowStatus, Integer32 = mibBuilder.importSymbols("Nortel-Magellan-Passport-StandardTextualConventionsMIB", "RowPointer", "DisplayString", "Unsigned32", "Counter32", "StorageType", "RowStatus", "Integer32")
NonReplicated, EnterpriseDateAndTime, AsciiString, AsciiStringIndex, Hex, Link, DigitString = mibBuilder.importSymbols("Nortel-Magellan-Passport-TextualConventionsMIB", "NonReplicated", "EnterpriseDateAndTime", "AsciiString", "AsciiStringIndex", "Hex", "Link", "DigitString")
passportMIBs, components = mibBuilder.importSymbols("Nortel-Magellan-Passport-UsefulDefinitionsMIB", "passportMIBs", "components")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
iso, MibIdentifier, ObjectIdentity, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Bits, Gauge32, IpAddress, TimeTicks, NotificationType, ModuleIdentity, Integer32, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "MibIdentifier", "ObjectIdentity", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Bits", "Gauge32", "IpAddress", "TimeTicks", "NotificationType", "ModuleIdentity", "Integer32", "Counter64")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# --- rsa component: MIB subtree roots and the rsa row-status table ---
# pysmi-generated SMI object instantiations; each setStatus() call is
# gated on mibBuilder.loadTexts, and statement order mirrors the OID
# tree under 1.3.6.1.4.1.562 — do not reorder.
serverAccessRsaMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116))
rsa = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108))
rsaRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 1), )
if mibBuilder.loadTexts: rsaRowStatusTable.setStatus('mandatory')
rsaRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"))
if mibBuilder.loadTexts: rsaRowStatusEntry.setStatus('mandatory')
rsaRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaRowStatus.setStatus('mandatory')
rsaComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaComponentName.setStatus('mandatory')
rsaStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaStorageType.setStatus('mandatory')
rsaIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 1, 1, 10), AsciiStringIndex().subtype(subtypeSpec=ValueSizeConstraint(1, 8)))
if mibBuilder.loadTexts: rsaIndex.setStatus('mandatory')
# --- rsa options / state / operational tables (pysmi-generated) ---
rsaOptionsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 10), )
if mibBuilder.loadTexts: rsaOptionsTable.setStatus('mandatory')
rsaOptionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"))
if mibBuilder.loadTexts: rsaOptionsEntry.setStatus('mandatory')
rsaLogicalProcessor = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 10, 1, 2), Link()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaLogicalProcessor.setStatus('mandatory')
# admin/operational/usage state columns for the rsa component
rsaStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 11), )
if mibBuilder.loadTexts: rsaStateTable.setStatus('mandatory')
rsaStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 11, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"))
if mibBuilder.loadTexts: rsaStateEntry.setStatus('mandatory')
rsaAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaAdminState.setStatus('mandatory')
rsaOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaOperationalState.setStatus('mandatory')
rsaUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 11, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaUsageState.setStatus('mandatory')
rsaOperationalTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 12), )
if mibBuilder.loadTexts: rsaOperationalTable.setStatus('mandatory')
rsaOperationalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 12, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"))
if mibBuilder.loadTexts: rsaOperationalEntry.setStatus('mandatory')
rsaMaxRsiConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 12, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1000, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaMaxRsiConnections.setStatus('mandatory')
rsaRsiConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 12, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaRsiConnections.setStatus('mandatory')
# --- rsaDna subcomponent: row status, address, and call-direction option
# tables (pysmi-generated) ---
rsaDna = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2))
rsaDnaRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 1), )
if mibBuilder.loadTexts: rsaDnaRowStatusTable.setStatus('mandatory')
rsaDnaRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaIndex"))
if mibBuilder.loadTexts: rsaDnaRowStatusEntry.setStatus('mandatory')
rsaDnaRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaRowStatus.setStatus('mandatory')
rsaDnaComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaComponentName.setStatus('mandatory')
rsaDnaStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaStorageType.setStatus('mandatory')
rsaDnaIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: rsaDnaIndex.setStatus('mandatory')
rsaDnaAddressTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 10), )
if mibBuilder.loadTexts: rsaDnaAddressTable.setStatus('mandatory')
rsaDnaAddressEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaIndex"))
if mibBuilder.loadTexts: rsaDnaAddressEntry.setStatus('mandatory')
rsaDnaNumberingPlanIndicator = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("x121", 0), ("e164", 1))).clone('x121')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaNumberingPlanIndicator.setStatus('mandatory')
rsaDnaDataNetworkAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 10, 1, 2), DigitString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaDataNetworkAddress.setStatus('mandatory')
rsaDnaOutgoingOptionsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 11), )
if mibBuilder.loadTexts: rsaDnaOutgoingOptionsTable.setStatus('mandatory')
rsaDnaOutgoingOptionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 11, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaIndex"))
if mibBuilder.loadTexts: rsaDnaOutgoingOptionsEntry.setStatus('mandatory')
rsaDnaOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disallowed", 0), ("allowed", 1))).clone('disallowed')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaOutCalls.setStatus('mandatory')
rsaDnaIncomingOptionsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 12), )
if mibBuilder.loadTexts: rsaDnaIncomingOptionsTable.setStatus('mandatory')
rsaDnaIncomingOptionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 12, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaIndex"))
if mibBuilder.loadTexts: rsaDnaIncomingOptionsEntry.setStatus('mandatory')
rsaDnaIncCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disallowed", 0), ("allowed", 1))).clone('allowed')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaIncCalls.setStatus('mandatory')
rsaDnaIncAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 12, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disallowed", 0), ("allowed", 1))).clone('disallowed')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaIncAccess.setStatus('mandatory')
# --- rsaDnaCug subcomponent: closed-user-group row status and option
# tables (pysmi-generated) ---
rsaDnaCug = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2))
rsaDnaCugRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 1), )
if mibBuilder.loadTexts: rsaDnaCugRowStatusTable.setStatus('mandatory')
rsaDnaCugRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaCugIndex"))
if mibBuilder.loadTexts: rsaDnaCugRowStatusEntry.setStatus('mandatory')
rsaDnaCugRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaCugRowStatus.setStatus('mandatory')
rsaDnaCugComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaCugComponentName.setStatus('mandatory')
rsaDnaCugStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaCugStorageType.setStatus('mandatory')
rsaDnaCugIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: rsaDnaCugIndex.setStatus('mandatory')
rsaDnaCugCugOptionsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10), )
if mibBuilder.loadTexts: rsaDnaCugCugOptionsTable.setStatus('mandatory')
rsaDnaCugCugOptionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaDnaCugIndex"))
if mibBuilder.loadTexts: rsaDnaCugCugOptionsEntry.setStatus('mandatory')
rsaDnaCugType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("national", 0), ("international", 1))).clone('national')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaCugType.setStatus('mandatory')
rsaDnaCugDnic = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 2), DigitString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4).clone(hexValue="30303030")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaCugDnic.setStatus('mandatory')
rsaDnaCugInterlockCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaDnaCugInterlockCode.setStatus('mandatory')
rsaDnaCugPreferential = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('no')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaCugPreferential.setStatus('mandatory')
rsaDnaCugOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disallowed", 0), ("allowed", 1))).clone('disallowed')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaCugOutCalls.setStatus('mandatory')
rsaDnaCugIncCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disallowed", 0), ("allowed", 1))).clone('allowed')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaCugIncCalls.setStatus('mandatory')
rsaDnaCugPrivileged = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 2, 2, 10, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaDnaCugPrivileged.setStatus('mandatory')
# --- rsaVncsAccess subcomponent: row status, provisioned, state and
# operational tables (pysmi-generated) ---
rsaVncsAccess = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3))
rsaVncsAccessRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 1), )
if mibBuilder.loadTexts: rsaVncsAccessRowStatusTable.setStatus('mandatory')
rsaVncsAccessRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaVncsAccessIndex"))
if mibBuilder.loadTexts: rsaVncsAccessRowStatusEntry.setStatus('mandatory')
rsaVncsAccessRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaVncsAccessRowStatus.setStatus('mandatory')
rsaVncsAccessComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessComponentName.setStatus('mandatory')
rsaVncsAccessStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessStorageType.setStatus('mandatory')
rsaVncsAccessIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: rsaVncsAccessIndex.setStatus('mandatory')
rsaVncsAccessProvisionedTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 10), )
if mibBuilder.loadTexts: rsaVncsAccessProvisionedTable.setStatus('mandatory')
rsaVncsAccessProvisionedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaVncsAccessIndex"))
if mibBuilder.loadTexts: rsaVncsAccessProvisionedEntry.setStatus('mandatory')
rsaVncsAccessTimeToLive = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 10, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsaVncsAccessTimeToLive.setStatus('mandatory')
rsaVncsAccessStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 11), )
if mibBuilder.loadTexts: rsaVncsAccessStateTable.setStatus('mandatory')
rsaVncsAccessStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 11, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaVncsAccessIndex"))
if mibBuilder.loadTexts: rsaVncsAccessStateEntry.setStatus('mandatory')
rsaVncsAccessAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessAdminState.setStatus('mandatory')
rsaVncsAccessOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessOperationalState.setStatus('mandatory')
rsaVncsAccessUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 11, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessUsageState.setStatus('mandatory')
rsaVncsAccessOperationalTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 12), )
if mibBuilder.loadTexts: rsaVncsAccessOperationalTable.setStatus('mandatory')
rsaVncsAccessOperationalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 12, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaVncsAccessIndex"))
if mibBuilder.loadTexts: rsaVncsAccessOperationalEntry.setStatus('mandatory')
rsaVncsAccessRequestsSent = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 12, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessRequestsSent.setStatus('mandatory')
rsaVncsAccessRepliesReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 12, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessRepliesReceived.setStatus('mandatory')
rsaVncsAccessRequestsQueued = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 12, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessRequestsQueued.setStatus('mandatory')
rsaVncsAccessRequestsDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 3, 12, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaVncsAccessRequestsDiscarded.setStatus('mandatory')
# --- rsaConnection subcomponent: row status, operational, server-stats
# and LAPF status/stats tables (pysmi-generated) ---
rsaConnection = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4))
rsaConnectionRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 1), )
if mibBuilder.loadTexts: rsaConnectionRowStatusTable.setStatus('mandatory')
rsaConnectionRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"))
if mibBuilder.loadTexts: rsaConnectionRowStatusEntry.setStatus('mandatory')
rsaConnectionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionRowStatus.setStatus('mandatory')
rsaConnectionComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionComponentName.setStatus('mandatory')
rsaConnectionStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionStorageType.setStatus('mandatory')
rsaConnectionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1000)))
if mibBuilder.loadTexts: rsaConnectionIndex.setStatus('mandatory')
rsaConnectionOperationalTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 10), )
if mibBuilder.loadTexts: rsaConnectionOperationalTable.setStatus('mandatory')
rsaConnectionOperationalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"))
if mibBuilder.loadTexts: rsaConnectionOperationalEntry.setStatus('mandatory')
rsaConnectionRemoteName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 10, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionRemoteName.setStatus('mandatory')
rsaConnectionCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("initializing", 0), ("creatingVc", 1), ("calling", 2), ("acceptingCall", 3), ("registeringFmo", 4), ("establishingLapf", 5), ("dataTransfer", 6), ("clearingCall", 7), ("terminating", 8), ("terminatingVc", 9), ("terminated", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionCallState.setStatus('mandatory')
rsaConnectionServerStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 11), )
if mibBuilder.loadTexts: rsaConnectionServerStatsTable.setStatus('mandatory')
rsaConnectionServerStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 11, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"))
if mibBuilder.loadTexts: rsaConnectionServerStatsEntry.setStatus('mandatory')
rsaConnectionVncsRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 11, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVncsRequests.setStatus('mandatory')
rsaConnectionVncsReplies = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 11, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVncsReplies.setStatus('mandatory')
rsaConnectionLapfStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 12), )
if mibBuilder.loadTexts: rsaConnectionLapfStatusTable.setStatus('mandatory')
rsaConnectionLapfStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 12, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"))
if mibBuilder.loadTexts: rsaConnectionLapfStatusEntry.setStatus('mandatory')
rsaConnectionLapfState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 5, 7))).clone(namedValues=NamedValues(("disconnected", 1), ("linkSetup", 2), ("disconnectRequest", 4), ("informationTransfer", 5), ("waitingAck", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfState.setStatus('mandatory')
rsaConnectionLapfQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 12, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfQueueSize.setStatus('mandatory')
rsaConnectionLapfStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13), )
if mibBuilder.loadTexts: rsaConnectionLapfStatsTable.setStatus('mandatory')
rsaConnectionLapfStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"))
if mibBuilder.loadTexts: rsaConnectionLapfStatsEntry.setStatus('mandatory')
rsaConnectionLapfStateChanges = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfStateChanges.setStatus('mandatory')
rsaConnectionLapfRemoteBusy = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfRemoteBusy.setStatus('mandatory')
rsaConnectionLapfAckTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfAckTimeouts.setStatus('mandatory')
rsaConnectionLapfRejectFramesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfRejectFramesRx.setStatus('mandatory')
rsaConnectionLapfIFramesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfIFramesTx.setStatus('mandatory')
rsaConnectionLapfIFramesTxDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfIFramesTxDiscarded.setStatus('mandatory')
rsaConnectionLapfIFramesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfIFramesRx.setStatus('mandatory')
rsaConnectionLapfIFramesRxDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 13, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionLapfIFramesRxDiscarded.setStatus('mandatory')
rsaConnectionVc = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2))
rsaConnectionVcRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 1), )
if mibBuilder.loadTexts: rsaConnectionVcRowStatusTable.setStatus('mandatory')
rsaConnectionVcRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionVcIndex"))
if mibBuilder.loadTexts: rsaConnectionVcRowStatusEntry.setStatus('mandatory')
rsaConnectionVcRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcRowStatus.setStatus('mandatory')
rsaConnectionVcComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcComponentName.setStatus('mandatory')
rsaConnectionVcStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcStorageType.setStatus('mandatory')
rsaConnectionVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: rsaConnectionVcIndex.setStatus('mandatory')
rsaConnectionVcCadTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10), )
if mibBuilder.loadTexts: rsaConnectionVcCadTable.setStatus('mandatory')
rsaConnectionVcCadEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionVcIndex"))
if mibBuilder.loadTexts: rsaConnectionVcCadEntry.setStatus('mandatory')
rsaConnectionVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("svc", 0), ("pvc", 1), ("spvc", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcType.setStatus('mandatory')
rsaConnectionVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("creating", 0), ("readyP1", 1), ("dteWaitingP2", 2), ("dceWaitingP3", 3), ("dataTransferP4", 4), ("unsupportedP5", 5), ("dteClearRequestP6", 6), ("dceClearIndicationP7", 7), ("termination", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcState.setStatus('mandatory')
rsaConnectionVcPreviousState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("creating", 0), ("readyP1", 1), ("dteWaitingP2", 2), ("dceWaitingP3", 3), ("dataTransferP4", 4), ("unsupportedP5", 5), ("dteClearRequestP6", 6), ("dceClearIndicationP7", 7), ("termination", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPreviousState.setStatus('mandatory')
rsaConnectionVcDiagnosticCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcDiagnosticCode.setStatus('mandatory')
rsaConnectionVcPreviousDiagnosticCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPreviousDiagnosticCode.setStatus('mandatory')
rsaConnectionVcCalledNpi = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("x121", 0), ("e164", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCalledNpi.setStatus('mandatory')
rsaConnectionVcCalledDna = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 7), DigitString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCalledDna.setStatus('mandatory')
rsaConnectionVcCalledLcn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCalledLcn.setStatus('mandatory')
rsaConnectionVcCallingNpi = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("x121", 0), ("e164", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCallingNpi.setStatus('mandatory')
rsaConnectionVcCallingDna = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 10), DigitString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCallingDna.setStatus('mandatory')
rsaConnectionVcCallingLcn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCallingLcn.setStatus('mandatory')
rsaConnectionVcAccountingEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("yes", 0), ("no", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcAccountingEnabled.setStatus('mandatory')
rsaConnectionVcFastSelectCall = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcFastSelectCall.setStatus('mandatory')
rsaConnectionVcPathReliability = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("high", 0), ("normal", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPathReliability.setStatus('mandatory')
rsaConnectionVcAccountingEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("callingEnd", 0), ("calledEnd", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcAccountingEnd.setStatus('mandatory')
rsaConnectionVcPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("normal", 0), ("high", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPriority.setStatus('mandatory')
rsaConnectionVcSegmentSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 22), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcSegmentSize.setStatus('mandatory')
rsaConnectionVcMaxSubnetPktSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 27), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcMaxSubnetPktSize.setStatus('mandatory')
rsaConnectionVcRcosToNetwork = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("throughput", 0), ("delay", 1), ("multimedia", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcRcosToNetwork.setStatus('mandatory')
rsaConnectionVcRcosFromNetwork = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("throughput", 0), ("delay", 1), ("multimedia", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcRcosFromNetwork.setStatus('mandatory')
rsaConnectionVcEmissionPriorityToNetwork = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("normal", 0), ("high", 1), ("interrupting", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcEmissionPriorityToNetwork.setStatus('mandatory')
rsaConnectionVcEmissionPriorityFromNetwork = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("normal", 0), ("high", 1), ("interrupting", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcEmissionPriorityFromNetwork.setStatus('mandatory')
rsaConnectionVcDataPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 10, 1, 32), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcDataPath.setStatus('mandatory')
rsaConnectionVcIntdTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11), )
if mibBuilder.loadTexts: rsaConnectionVcIntdTable.setStatus('mandatory')
rsaConnectionVcIntdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionVcIndex"))
if mibBuilder.loadTexts: rsaConnectionVcIntdEntry.setStatus('mandatory')
rsaConnectionVcCallReferenceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11, 1, 1), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCallReferenceNumber.setStatus('mandatory')
rsaConnectionVcElapsedTimeTillNow = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcElapsedTimeTillNow.setStatus('mandatory')
rsaConnectionVcSegmentsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcSegmentsRx.setStatus('mandatory')
rsaConnectionVcSegmentsSent = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcSegmentsSent.setStatus('mandatory')
rsaConnectionVcStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 11, 1, 5), EnterpriseDateAndTime().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(19, 19), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcStartTime.setStatus('mandatory')
rsaConnectionVcFrdTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12), )
if mibBuilder.loadTexts: rsaConnectionVcFrdTable.setStatus('mandatory')
rsaConnectionVcFrdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionVcIndex"))
if mibBuilder.loadTexts: rsaConnectionVcFrdEntry.setStatus('mandatory')
rsaConnectionVcFrmCongestedToSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcFrmCongestedToSubnet.setStatus('mandatory')
rsaConnectionVcCannotForwardToSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCannotForwardToSubnet.setStatus('mandatory')
rsaConnectionVcNotDataXferToSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcNotDataXferToSubnet.setStatus('mandatory')
rsaConnectionVcOutOfRangeFrmFromSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcOutOfRangeFrmFromSubnet.setStatus('mandatory')
rsaConnectionVcCombErrorsFromSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcCombErrorsFromSubnet.setStatus('mandatory')
rsaConnectionVcDuplicatesFromSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcDuplicatesFromSubnet.setStatus('mandatory')
rsaConnectionVcNotDataXferFromSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcNotDataXferFromSubnet.setStatus('mandatory')
rsaConnectionVcFrmLossTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcFrmLossTimeouts.setStatus('mandatory')
rsaConnectionVcOoSeqByteCntExceeded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcOoSeqByteCntExceeded.setStatus('mandatory')
rsaConnectionVcPeakOoSeqPktCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPeakOoSeqPktCount.setStatus('mandatory')
rsaConnectionVcPeakOoSeqFrmForwarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 12), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPeakOoSeqFrmForwarded.setStatus('mandatory')
rsaConnectionVcSendSequenceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcSendSequenceNumber.setStatus('mandatory')
rsaConnectionVcPktRetryTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 15), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPktRetryTimeouts.setStatus('mandatory')
rsaConnectionVcPeakRetryQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 16), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPeakRetryQueueSize.setStatus('mandatory')
rsaConnectionVcSubnetRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 17), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcSubnetRecoveries.setStatus('mandatory')
rsaConnectionVcOoSeqPktCntExceeded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 19), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcOoSeqPktCntExceeded.setStatus('mandatory')
rsaConnectionVcPeakOoSeqByteCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 12, 1, 20), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 50000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcPeakOoSeqByteCount.setStatus('mandatory')
rsaConnectionVcDmepTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 417), )
if mibBuilder.loadTexts: rsaConnectionVcDmepTable.setStatus('mandatory')
rsaConnectionVcDmepEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 417, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionVcIndex"), (0, "Nortel-Magellan-Passport-ServerAccessRsaMIB", "rsaConnectionVcDmepValue"))
if mibBuilder.loadTexts: rsaConnectionVcDmepEntry.setStatus('mandatory')
rsaConnectionVcDmepValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 108, 4, 2, 417, 1, 1), RowPointer()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsaConnectionVcDmepValue.setStatus('mandatory')
serverAccessRsaGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 1))
serverAccessRsaGroupBE = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 1, 5))
serverAccessRsaGroupBE01 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 1, 5, 2))
serverAccessRsaGroupBE01A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 1, 5, 2, 2))
serverAccessRsaCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 3))
serverAccessRsaCapabilitiesBE = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 3, 5))
serverAccessRsaCapabilitiesBE01 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 3, 5, 2))
serverAccessRsaCapabilitiesBE01A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 116, 3, 5, 2, 2))
mibBuilder.exportSymbols("Nortel-Magellan-Passport-ServerAccessRsaMIB", rsaConnectionVcPeakRetryQueueSize=rsaConnectionVcPeakRetryQueueSize, rsaConnectionLapfIFramesTx=rsaConnectionLapfIFramesTx, rsaDnaRowStatusTable=rsaDnaRowStatusTable, rsaConnectionOperationalEntry=rsaConnectionOperationalEntry, rsaDnaIncomingOptionsEntry=rsaDnaIncomingOptionsEntry, rsaConnectionVcCalledDna=rsaConnectionVcCalledDna, rsaConnectionVncsRequests=rsaConnectionVncsRequests, rsaConnectionVcFrmLossTimeouts=rsaConnectionVcFrmLossTimeouts, rsaDnaCugRowStatusTable=rsaDnaCugRowStatusTable, rsaOptionsTable=rsaOptionsTable, rsaConnectionVcEmissionPriorityFromNetwork=rsaConnectionVcEmissionPriorityFromNetwork, rsaDnaCugStorageType=rsaDnaCugStorageType, rsaConnectionVcPktRetryTimeouts=rsaConnectionVcPktRetryTimeouts, rsaDnaStorageType=rsaDnaStorageType, rsaConnectionVcCallReferenceNumber=rsaConnectionVcCallReferenceNumber, rsaDnaCugPrivileged=rsaDnaCugPrivileged, rsaOperationalState=rsaOperationalState, rsaVncsAccessOperationalTable=rsaVncsAccessOperationalTable, rsaConnectionVcDiagnosticCode=rsaConnectionVcDiagnosticCode, rsaComponentName=rsaComponentName, rsaConnectionVcDuplicatesFromSubnet=rsaConnectionVcDuplicatesFromSubnet, rsaDnaCugDnic=rsaDnaCugDnic, rsa=rsa, rsaMaxRsiConnections=rsaMaxRsiConnections, rsaConnectionVcCallingDna=rsaConnectionVcCallingDna, rsaConnectionVcPriority=rsaConnectionVcPriority, rsaUsageState=rsaUsageState, rsaRowStatusTable=rsaRowStatusTable, rsaOperationalEntry=rsaOperationalEntry, rsaVncsAccessRequestsDiscarded=rsaVncsAccessRequestsDiscarded, rsaConnectionVcCannotForwardToSubnet=rsaConnectionVcCannotForwardToSubnet, rsaRowStatus=rsaRowStatus, rsaVncsAccessIndex=rsaVncsAccessIndex, rsaConnectionVcPeakOoSeqByteCount=rsaConnectionVcPeakOoSeqByteCount, rsaConnectionVc=rsaConnectionVc, rsaDnaRowStatusEntry=rsaDnaRowStatusEntry, rsaConnectionOperationalTable=rsaConnectionOperationalTable, rsaDnaComponentName=rsaDnaComponentName, 
rsaConnectionVcCallingLcn=rsaConnectionVcCallingLcn, rsaVncsAccessOperationalState=rsaVncsAccessOperationalState, rsaDnaNumberingPlanIndicator=rsaDnaNumberingPlanIndicator, rsaOptionsEntry=rsaOptionsEntry, rsaConnectionVcNotDataXferToSubnet=rsaConnectionVcNotDataXferToSubnet, rsaConnectionLapfIFramesTxDiscarded=rsaConnectionLapfIFramesTxDiscarded, rsaDnaOutgoingOptionsEntry=rsaDnaOutgoingOptionsEntry, rsaConnectionVcIntdEntry=rsaConnectionVcIntdEntry, rsaConnectionLapfState=rsaConnectionLapfState, rsaConnectionVcRcosFromNetwork=rsaConnectionVcRcosFromNetwork, rsaConnectionVcFrmCongestedToSubnet=rsaConnectionVcFrmCongestedToSubnet, rsaLogicalProcessor=rsaLogicalProcessor, serverAccessRsaGroupBE=serverAccessRsaGroupBE, rsaDnaAddressTable=rsaDnaAddressTable, serverAccessRsaCapabilities=serverAccessRsaCapabilities, rsaVncsAccessStateEntry=rsaVncsAccessStateEntry, rsaConnectionVcPeakOoSeqPktCount=rsaConnectionVcPeakOoSeqPktCount, rsaDnaCugOutCalls=rsaDnaCugOutCalls, rsaDnaCugCugOptionsTable=rsaDnaCugCugOptionsTable, rsaConnectionVcFrdTable=rsaConnectionVcFrdTable, rsaVncsAccessProvisionedTable=rsaVncsAccessProvisionedTable, rsaConnectionLapfAckTimeouts=rsaConnectionLapfAckTimeouts, rsaConnectionVcPeakOoSeqFrmForwarded=rsaConnectionVcPeakOoSeqFrmForwarded, rsaDnaCugPreferential=rsaDnaCugPreferential, rsaConnectionVcCombErrorsFromSubnet=rsaConnectionVcCombErrorsFromSubnet, serverAccessRsaCapabilitiesBE01A=serverAccessRsaCapabilitiesBE01A, rsaConnectionLapfStatusTable=rsaConnectionLapfStatusTable, rsaStateTable=rsaStateTable, rsaConnectionVcStorageType=rsaConnectionVcStorageType, rsaConnectionLapfIFramesRxDiscarded=rsaConnectionLapfIFramesRxDiscarded, rsaDnaCug=rsaDnaCug, rsaConnectionStorageType=rsaConnectionStorageType, rsaConnectionVcCalledLcn=rsaConnectionVcCalledLcn, serverAccessRsaCapabilitiesBE01=serverAccessRsaCapabilitiesBE01, rsaRowStatusEntry=rsaRowStatusEntry, rsaVncsAccessRowStatusEntry=rsaVncsAccessRowStatusEntry, rsaDnaIncCalls=rsaDnaIncCalls, 
serverAccessRsaGroupBE01A=serverAccessRsaGroupBE01A, rsaDnaCugComponentName=rsaDnaCugComponentName, rsaConnectionLapfStateChanges=rsaConnectionLapfStateChanges, rsaConnectionRowStatus=rsaConnectionRowStatus, rsaDna=rsaDna, rsaConnectionVcDmepValue=rsaConnectionVcDmepValue, rsaDnaCugType=rsaDnaCugType, rsaConnectionVcStartTime=rsaConnectionVcStartTime, rsaDnaRowStatus=rsaDnaRowStatus, rsaDnaIncAccess=rsaDnaIncAccess, rsaVncsAccessTimeToLive=rsaVncsAccessTimeToLive, rsaConnectionVcSubnetRecoveries=rsaConnectionVcSubnetRecoveries, rsaDnaIncomingOptionsTable=rsaDnaIncomingOptionsTable, rsaConnectionVcIntdTable=rsaConnectionVcIntdTable, rsaConnectionIndex=rsaConnectionIndex, rsaConnectionComponentName=rsaConnectionComponentName, rsaConnectionVcRowStatusEntry=rsaConnectionVcRowStatusEntry, rsaConnectionVcSendSequenceNumber=rsaConnectionVcSendSequenceNumber, rsaConnectionVcDmepTable=rsaConnectionVcDmepTable, rsaConnectionVcOoSeqByteCntExceeded=rsaConnectionVcOoSeqByteCntExceeded, rsaConnectionLapfStatusEntry=rsaConnectionLapfStatusEntry, rsaDnaCugRowStatus=rsaDnaCugRowStatus, rsaConnectionVcCadTable=rsaConnectionVcCadTable, rsaVncsAccessRowStatusTable=rsaVncsAccessRowStatusTable, rsaStateEntry=rsaStateEntry, rsaConnectionVcOutOfRangeFrmFromSubnet=rsaConnectionVcOutOfRangeFrmFromSubnet, rsaConnectionRemoteName=rsaConnectionRemoteName, serverAccessRsaGroupBE01=serverAccessRsaGroupBE01, serverAccessRsaGroup=serverAccessRsaGroup, rsaConnectionVcAccountingEnabled=rsaConnectionVcAccountingEnabled, rsaConnectionVcOoSeqPktCntExceeded=rsaConnectionVcOoSeqPktCntExceeded, rsaDnaCugCugOptionsEntry=rsaDnaCugCugOptionsEntry, rsaVncsAccess=rsaVncsAccess, rsaAdminState=rsaAdminState, rsaDnaIndex=rsaDnaIndex, rsaVncsAccessProvisionedEntry=rsaVncsAccessProvisionedEntry, rsaConnectionVcPathReliability=rsaConnectionVcPathReliability, rsaConnectionVcRowStatus=rsaConnectionVcRowStatus, rsaDnaCugInterlockCode=rsaDnaCugInterlockCode, rsaVncsAccessRowStatus=rsaVncsAccessRowStatus, 
rsaConnectionVcRowStatusTable=rsaConnectionVcRowStatusTable, rsaConnectionVcMaxSubnetPktSize=rsaConnectionVcMaxSubnetPktSize, rsaConnectionVcSegmentsRx=rsaConnectionVcSegmentsRx, rsaVncsAccessAdminState=rsaVncsAccessAdminState, rsaDnaOutCalls=rsaDnaOutCalls, rsaConnection=rsaConnection, rsaConnectionServerStatsTable=rsaConnectionServerStatsTable, rsaConnectionVcType=rsaConnectionVcType, rsaConnectionVcAccountingEnd=rsaConnectionVcAccountingEnd, rsaConnectionVcCalledNpi=rsaConnectionVcCalledNpi, rsaOperationalTable=rsaOperationalTable, rsaConnectionVcFrdEntry=rsaConnectionVcFrdEntry, rsaDnaCugIndex=rsaDnaCugIndex, rsaVncsAccessRequestsSent=rsaVncsAccessRequestsSent, rsaVncsAccessRepliesReceived=rsaVncsAccessRepliesReceived, rsaConnectionVcPreviousState=rsaConnectionVcPreviousState, rsaDnaCugRowStatusEntry=rsaDnaCugRowStatusEntry, serverAccessRsaCapabilitiesBE=serverAccessRsaCapabilitiesBE, rsaConnectionRowStatusEntry=rsaConnectionRowStatusEntry, rsaStorageType=rsaStorageType, rsaConnectionVcComponentName=rsaConnectionVcComponentName, rsaVncsAccessStorageType=rsaVncsAccessStorageType, rsaConnectionVcState=rsaConnectionVcState, rsaConnectionVncsReplies=rsaConnectionVncsReplies, rsaConnectionVcFastSelectCall=rsaConnectionVcFastSelectCall, rsaConnectionVcSegmentSize=rsaConnectionVcSegmentSize, rsaConnectionVcRcosToNetwork=rsaConnectionVcRcosToNetwork, rsaConnectionCallState=rsaConnectionCallState, rsaConnectionVcEmissionPriorityToNetwork=rsaConnectionVcEmissionPriorityToNetwork, rsaDnaAddressEntry=rsaDnaAddressEntry, rsaRsiConnections=rsaRsiConnections, rsaConnectionVcDataPath=rsaConnectionVcDataPath, rsaConnectionVcSegmentsSent=rsaConnectionVcSegmentsSent, rsaVncsAccessComponentName=rsaVncsAccessComponentName, rsaDnaDataNetworkAddress=rsaDnaDataNetworkAddress, rsaConnectionVcPreviousDiagnosticCode=rsaConnectionVcPreviousDiagnosticCode, rsaConnectionVcElapsedTimeTillNow=rsaConnectionVcElapsedTimeTillNow, rsaConnectionLapfStatsEntry=rsaConnectionLapfStatsEntry, 
rsaConnectionVcDmepEntry=rsaConnectionVcDmepEntry, rsaConnectionLapfQueueSize=rsaConnectionLapfQueueSize, rsaDnaCugIncCalls=rsaDnaCugIncCalls, rsaVncsAccessUsageState=rsaVncsAccessUsageState, rsaIndex=rsaIndex, rsaConnectionVcCadEntry=rsaConnectionVcCadEntry, rsaVncsAccessOperationalEntry=rsaVncsAccessOperationalEntry, rsaConnectionLapfRemoteBusy=rsaConnectionLapfRemoteBusy, rsaDnaOutgoingOptionsTable=rsaDnaOutgoingOptionsTable, rsaConnectionVcCallingNpi=rsaConnectionVcCallingNpi, rsaConnectionLapfRejectFramesRx=rsaConnectionLapfRejectFramesRx, rsaConnectionVcNotDataXferFromSubnet=rsaConnectionVcNotDataXferFromSubnet, rsaConnectionRowStatusTable=rsaConnectionRowStatusTable, rsaConnectionLapfIFramesRx=rsaConnectionLapfIFramesRx, rsaConnectionVcIndex=rsaConnectionVcIndex, serverAccessRsaMIB=serverAccessRsaMIB, rsaConnectionServerStatsEntry=rsaConnectionServerStatsEntry, rsaConnectionLapfStatsTable=rsaConnectionLapfStatsTable, rsaVncsAccessStateTable=rsaVncsAccessStateTable, rsaVncsAccessRequestsQueued=rsaVncsAccessRequestsQueued)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
f75fc4dbabe8144a65242333ea6b50c9c71cc1ee | 5e9fe8ea97cb75326566b6469b7c4903aff680dc | /models/academic/test.py | 6b6a3ab1cdbeb5239df483a71c28ef113bcd2cc6 | [] | no_license | Trilokan/vetrivel | a2f5444cff88319c4bde2016694ee084df5912fc | e6d4f1cae23f70a3b82da2e5971706d4b417f056 | refs/heads/master | 2020-04-22T13:16:32.663207 | 2019-05-01T11:04:22 | 2019-05-01T11:04:22 | 170,403,357 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,043 | py | # -*- coding: utf-8 -*-
from odoo import models, fields, api
# (value, label) pairs shared by the Selection "progress" fields of the
# test models in this module.
PROGRESS_INFO = [("draft", "Draft"),
                 ("confirmed", "Confirmed"),
                 ("done", "Done"),
                 ("cancel", "Cancel")]
class ArcTest(models.Model):
    """A test (exam) conducted for one section in a given subject/syllabus.

    Per-student marks are held in the ``arc.test.detail`` one2many lines.
    """
    _name = "arc.test"

    # NOTE(review): default="" is an odd default for a Date field (an empty
    # string, not a date). Kept as-is for backward compatibility — confirm
    # whether fields.Date.today was intended.
    date = fields.Date(string="Date", default="", required=True)
    name = fields.Char(string="Name", readonly=True)
    # Academic year and standard are derived from the chosen section in create().
    academic_id = fields.Many2one(comodel_name="arc.academic", string="Academic")
    standard_id = fields.Many2one(comodel_name="arc.standard", string="Standard")
    section_id = fields.Many2one(comodel_name="arc.section", string="Section", required=True)
    subject_id = fields.Many2one(comodel_name="arc.subject", string="Subject", required=True)
    syllabus_id = fields.Many2one(comodel_name="arc.syllabus", string="Syllabus", required=True)
    teacher_id = fields.Many2one(comodel_name="arc.person", string="Teacher", required=True)
    total_marks = fields.Float(string="Total Marks", default=0.0, required=True)
    test_detail = fields.One2many(comodel_name="arc.test.detail", inverse_name="test_id")
    progress = fields.Selection(selection=PROGRESS_INFO, default="draft")

    @api.multi
    def add_test_detail(self):
        """Populate detail lines for the test (not implemented yet)."""
        pass

    @api.multi
    def trigger_done(self):
        """Mark the test as finished."""
        self.write({"progress": "done"})

    @api.model
    def create(self, vals):
        """Create a test, deriving academic year and standard from the section.

        The previous implementation assigned the raw ``arc.section`` recordset
        (instead of an integer id) to both many2one fields, and repeated the
        ``academic_id`` assignment three times; the apparent intent was to
        copy the section's academic/standard context onto the new test.
        """
        vals["name"] = 0  # NOTE(review): placeholder name; an ir.sequence was probably intended — confirm
        section = self.env["arc.section"].browse(vals["section_id"])
        # Assumes arc.section carries academic_id/standard_id many2one fields
        # (mirroring the fields declared above) — TODO confirm against arc.section.
        vals["academic_id"] = section.academic_id.id
        vals["standard_id"] = section.standard_id.id
        return super(ArcTest, self).create(vals)
class ArcTestDetail(models.Model):
    """One student's marks for a single test (detail line of ``arc.test``)."""
    _name = "arc.test.detail"
    student_id = fields.Many2one(comodel_name="arc.student", string="Student")
    # NOTE(review): labelled "Total Marks" but appears to hold the marks the
    # student obtained in this test — confirm the intended label.
    marks = fields.Float(string="Total Marks")
    test_id = fields.Many2one(comodel_name="arc.test", string="Test")
    # Mirrors the parent test's workflow state via the related field.
    progress = fields.Selection(selection=PROGRESS_INFO, related="test_id.progress")
| [
"ram@hk.com"
] | ram@hk.com |
d17005dff1ae118402fafd7e028e8a38dd73de61 | 78c4b89e8b4d6ca2eeb1ad8979bfa19f8ce67c06 | /Curso em Video/desafio066b.py | aee4bf97cc810bd2fad09b5677c464a5580044aa | [] | no_license | lucasoliveiraprofissional/Cursos-Python | 75d42b60167cfdf4324e615f8e24868d324de350 | 3eb000949f159ff69b7b92dbb5ff37df1cd0e1de | refs/heads/main | 2023-01-28T19:35:54.490747 | 2020-12-01T01:11:02 | 2020-12-01T01:11:02 | 315,455,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 723 | py | '''Crie um programa que leia vários números
inteiros pelo teclado. O programa só vai parar
quando o usuário digitar o valor 999, que é a
condição de parada. No final, mostre quantos números
foram digitados e qual foi a soma entre eles
(desconsiderando o flag)'''
'''Copiei a versão dele já de cara pois fiquei
mais de uma semana sem treinar Python'''
soma = cont = 0
while True:
num= int(input('Digite um valor (999 para parar): '))
if num == 999:
break
#tem que ser desse modo, antes dele chegar a somar, tem
#que cortar o barato dele, não pode deixar ele chegar a
#somar. Por isso esse if vem antes.
cont += 1
soma += num
print(f'A soma dos {cont} valores foi: {soma}')
| [
"lucas.oliveira@quaestum.com.br"
] | lucas.oliveira@quaestum.com.br |
3ed5f3604345089482ce1a65e6c380353de26a12 | b212ec9d705fb77cac102dceb12eb668099fd1ae | /advanced/exams/retake_april_2020/find_the_eggs.py | 3387b0e159d1bb4a370dc576f7ad2e8ee941d536 | [] | no_license | xpucko/Software-University-SoftUni | 20ef91a0be91a8a09a56d9fdc15888f91409de2f | a1fc1781424f025954948299be7f75d317e32dc1 | refs/heads/master | 2023-02-04T11:58:33.068431 | 2020-12-24T00:39:11 | 2020-12-24T00:39:11 | 280,227,310 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | def find_strongest_eggs(*args):
nums, sublist_count = args
matrix = [[] for _ in range(sublist_count)]
while nums:
for i in range(len(matrix)):
matrix[i].append(nums[0])
nums.pop(0)
result = []
for sublist in matrix:
left_num = sublist[(len(sublist) // 2) - 1]
mid_num = sublist[len(sublist) // 2]
right_num = sublist[(len(sublist) // 2) + 1]
cond_1 = left_num < mid_num > right_num
cond_2 = left_num < right_num
if cond_1 and cond_2:
result.append(mid_num)
return result
| [
"hristiyan.plamenov.valchev@gmail.com"
] | hristiyan.plamenov.valchev@gmail.com |
5b0105d2c2eee49411aac32a2215f322fa9297fe | a60e81b51935fb53c0900fecdadba55d86110afe | /python/note/上下文管理器/context_manager.py | cb902170d1e29a50a78f3b9cbdc96dc0205641cc | [] | no_license | FrankieZhen/Lookoop | fab6855f5660467f70dc5024d9aa38213ecf48a7 | 212f8b83d6ac22db1a777f980075d9e12ce521d2 | refs/heads/master | 2020-07-27T08:12:45.887814 | 2019-09-16T11:48:20 | 2019-09-16T11:48:20 | 209,021,915 | 1 | 0 | null | 2019-09-17T10:10:46 | 2019-09-17T10:10:46 | null | UTF-8 | Python | false | false | 699 | py | # coding=utf-8
# 2019-1-28
# 上下文管理器
# __enter__ 与 __exit__ 用于资源分配以及释放相关工作, 如打开关闭文件, 异常处理, 断开流的链接以及锁分配
class MyContextManager(object):
def __enter__(self):
print("extering...")
def __exit__(self, exception_type, exception_value, traceback):
print("leaving...")
if exception_type is None:
print("No Exception.")
return False
elif exception_type is ValueError:
print("Value error")
return False
else:
print("other error")
return True
if __name__ == '__main__':
# with MyContextManager():
# print("Testing..")
# raise(ValueError)
with MyContextManager():
print("Testing..") | [
"33798487+YangXiaoo@users.noreply.github.com"
] | 33798487+YangXiaoo@users.noreply.github.com |
ed0b68197695bd7616d45d70acdfda006ea8500d | 28cab1ef484a5796fc9b0897043e918f9a28e650 | /dalangshen/urls.py | 6fddb5ed1f496258092b663337e2019cbb419cce | [] | no_license | bxxfighting/dalangshen | 12cb58d2078804327dbf7a01be0fc2a0d27f4495 | e174147b8778c188941d5fd0f5e33de65afc8b00 | refs/heads/main | 2023-01-15T08:07:57.429342 | 2020-11-16T03:49:34 | 2020-11-16T03:49:34 | 313,184,879 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from django.urls import path
from django.urls import include
urlpatterns = [
path('api/v1/', include({
path('account/', include('account.urls')),
}))
]
| [
"boxingxing@limikeji.com"
] | boxingxing@limikeji.com |
3bad10e51318c95648d6dfaaa3001e5eea3ee6f1 | 19d47d47c9614dddcf2f8d744d883a90ade0ce82 | /pynsxt/swagger_client/models/lb_service_status.py | 967dd44ad83b922fe935de26aee7127d94b9d8b4 | [] | no_license | darshanhuang1/pynsxt-1 | 9ed7c0da9b3a64e837a26cbbd8b228e811cee823 | fb1091dff1af7f8b8f01aec715682dea60765eb8 | refs/heads/master | 2020-05-25T14:51:09.932853 | 2018-05-16T12:43:48 | 2018-05-16T12:43:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,980 | py | # coding: utf-8
"""
NSX API
VMware NSX REST API # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.lb_pool_status import LbPoolStatus # noqa: F401,E501
from swagger_client.models.lb_virtual_server_status import LbVirtualServerStatus # noqa: F401,E501
class LbServiceStatus(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'pools': 'list[LbPoolStatus]',
'cpu_usage': 'int',
'active_transport_nodes': 'list[str]',
'memory_usage': 'int',
'service_id': 'str',
'last_update_timestamp': 'int',
'standby_transport_nodes': 'list[str]',
'error_message': 'str',
'virtual_servers': 'list[LbVirtualServerStatus]',
'service_status': 'str'
}
attribute_map = {
'pools': 'pools',
'cpu_usage': 'cpu_usage',
'active_transport_nodes': 'active_transport_nodes',
'memory_usage': 'memory_usage',
'service_id': 'service_id',
'last_update_timestamp': 'last_update_timestamp',
'standby_transport_nodes': 'standby_transport_nodes',
'error_message': 'error_message',
'virtual_servers': 'virtual_servers',
'service_status': 'service_status'
}
def __init__(self, pools=None, cpu_usage=None, active_transport_nodes=None, memory_usage=None, service_id=None, last_update_timestamp=None, standby_transport_nodes=None, error_message=None, virtual_servers=None, service_status=None): # noqa: E501
"""LbServiceStatus - a model defined in Swagger""" # noqa: E501
self._pools = None
self._cpu_usage = None
self._active_transport_nodes = None
self._memory_usage = None
self._service_id = None
self._last_update_timestamp = None
self._standby_transport_nodes = None
self._error_message = None
self._virtual_servers = None
self._service_status = None
self.discriminator = None
if pools is not None:
self.pools = pools
if cpu_usage is not None:
self.cpu_usage = cpu_usage
if active_transport_nodes is not None:
self.active_transport_nodes = active_transport_nodes
if memory_usage is not None:
self.memory_usage = memory_usage
self.service_id = service_id
if last_update_timestamp is not None:
self.last_update_timestamp = last_update_timestamp
if standby_transport_nodes is not None:
self.standby_transport_nodes = standby_transport_nodes
if error_message is not None:
self.error_message = error_message
if virtual_servers is not None:
self.virtual_servers = virtual_servers
if service_status is not None:
self.service_status = service_status
@property
def pools(self):
"""Gets the pools of this LbServiceStatus. # noqa: E501
status of load balancer pools # noqa: E501
:return: The pools of this LbServiceStatus. # noqa: E501
:rtype: list[LbPoolStatus]
"""
return self._pools
@pools.setter
def pools(self, pools):
"""Sets the pools of this LbServiceStatus.
status of load balancer pools # noqa: E501
:param pools: The pools of this LbServiceStatus. # noqa: E501
:type: list[LbPoolStatus]
"""
self._pools = pools
@property
def cpu_usage(self):
"""Gets the cpu_usage of this LbServiceStatus. # noqa: E501
Cpu usage in percentage # noqa: E501
:return: The cpu_usage of this LbServiceStatus. # noqa: E501
:rtype: int
"""
return self._cpu_usage
@cpu_usage.setter
def cpu_usage(self, cpu_usage):
"""Sets the cpu_usage of this LbServiceStatus.
Cpu usage in percentage # noqa: E501
:param cpu_usage: The cpu_usage of this LbServiceStatus. # noqa: E501
:type: int
"""
self._cpu_usage = cpu_usage
@property
def active_transport_nodes(self):
"""Gets the active_transport_nodes of this LbServiceStatus. # noqa: E501
Ids of load balancer service related active transport nodes # noqa: E501
:return: The active_transport_nodes of this LbServiceStatus. # noqa: E501
:rtype: list[str]
"""
return self._active_transport_nodes
@active_transport_nodes.setter
def active_transport_nodes(self, active_transport_nodes):
"""Sets the active_transport_nodes of this LbServiceStatus.
Ids of load balancer service related active transport nodes # noqa: E501
:param active_transport_nodes: The active_transport_nodes of this LbServiceStatus. # noqa: E501
:type: list[str]
"""
self._active_transport_nodes = active_transport_nodes
@property
def memory_usage(self):
"""Gets the memory_usage of this LbServiceStatus. # noqa: E501
Memory usage in percentage # noqa: E501
:return: The memory_usage of this LbServiceStatus. # noqa: E501
:rtype: int
"""
return self._memory_usage
@memory_usage.setter
def memory_usage(self, memory_usage):
"""Sets the memory_usage of this LbServiceStatus.
Memory usage in percentage # noqa: E501
:param memory_usage: The memory_usage of this LbServiceStatus. # noqa: E501
:type: int
"""
self._memory_usage = memory_usage
@property
def service_id(self):
"""Gets the service_id of this LbServiceStatus. # noqa: E501
Load balancer service identifier # noqa: E501
:return: The service_id of this LbServiceStatus. # noqa: E501
:rtype: str
"""
return self._service_id
@service_id.setter
def service_id(self, service_id):
"""Sets the service_id of this LbServiceStatus.
Load balancer service identifier # noqa: E501
:param service_id: The service_id of this LbServiceStatus. # noqa: E501
:type: str
"""
if service_id is None:
raise ValueError("Invalid value for `service_id`, must not be `None`") # noqa: E501
self._service_id = service_id
@property
def last_update_timestamp(self):
"""Gets the last_update_timestamp of this LbServiceStatus. # noqa: E501
Timestamp when the data was last updated # noqa: E501
:return: The last_update_timestamp of this LbServiceStatus. # noqa: E501
:rtype: int
"""
return self._last_update_timestamp
@last_update_timestamp.setter
def last_update_timestamp(self, last_update_timestamp):
"""Sets the last_update_timestamp of this LbServiceStatus.
Timestamp when the data was last updated # noqa: E501
:param last_update_timestamp: The last_update_timestamp of this LbServiceStatus. # noqa: E501
:type: int
"""
self._last_update_timestamp = last_update_timestamp
@property
def standby_transport_nodes(self):
"""Gets the standby_transport_nodes of this LbServiceStatus. # noqa: E501
Ids of load balancer service related standby transport nodes # noqa: E501
:return: The standby_transport_nodes of this LbServiceStatus. # noqa: E501
:rtype: list[str]
"""
return self._standby_transport_nodes
@standby_transport_nodes.setter
def standby_transport_nodes(self, standby_transport_nodes):
"""Sets the standby_transport_nodes of this LbServiceStatus.
Ids of load balancer service related standby transport nodes # noqa: E501
:param standby_transport_nodes: The standby_transport_nodes of this LbServiceStatus. # noqa: E501
:type: list[str]
"""
self._standby_transport_nodes = standby_transport_nodes
@property
def error_message(self):
"""Gets the error_message of this LbServiceStatus. # noqa: E501
Error message, if available # noqa: E501
:return: The error_message of this LbServiceStatus. # noqa: E501
:rtype: str
"""
return self._error_message
@error_message.setter
def error_message(self, error_message):
"""Sets the error_message of this LbServiceStatus.
Error message, if available # noqa: E501
:param error_message: The error_message of this LbServiceStatus. # noqa: E501
:type: str
"""
self._error_message = error_message
@property
def virtual_servers(self):
"""Gets the virtual_servers of this LbServiceStatus. # noqa: E501
status of load balancer virtual servers # noqa: E501
:return: The virtual_servers of this LbServiceStatus. # noqa: E501
:rtype: list[LbVirtualServerStatus]
"""
return self._virtual_servers
@virtual_servers.setter
def virtual_servers(self, virtual_servers):
"""Sets the virtual_servers of this LbServiceStatus.
status of load balancer virtual servers # noqa: E501
:param virtual_servers: The virtual_servers of this LbServiceStatus. # noqa: E501
:type: list[LbVirtualServerStatus]
"""
self._virtual_servers = virtual_servers
@property
def service_status(self):
"""Gets the service_status of this LbServiceStatus. # noqa: E501
UP means the load balancer service is working fine on both transport-nodes(if have); DOWN means the load balancer service is down on both transport-nodes (if have), hence the load balancer will not respond to any requests; ERROR means error happens on transport-node(s) or no status is reported from transport-node(s). The load balancer service may be working (or not working); NO_STANDBY means load balancer service is working in one of the transport node while not in the other transport-node (if have). Hence if the load balancer service in the working transport-node goes down, the load balancer service will go down; DETACHED means that the load balancer service has no attachment setting and is not instantiated in any transport nodes; DISABLED means that admin state of load balancer service is DISABLED; UNKNOWN means that no status reported from transport-nodes.The load balancer service may be working(or not working). # noqa: E501
:return: The service_status of this LbServiceStatus. # noqa: E501
:rtype: str
"""
return self._service_status
@service_status.setter
def service_status(self, service_status):
"""Sets the service_status of this LbServiceStatus.
UP means the load balancer service is working fine on both transport-nodes(if have); DOWN means the load balancer service is down on both transport-nodes (if have), hence the load balancer will not respond to any requests; ERROR means error happens on transport-node(s) or no status is reported from transport-node(s). The load balancer service may be working (or not working); NO_STANDBY means load balancer service is working in one of the transport node while not in the other transport-node (if have). Hence if the load balancer service in the working transport-node goes down, the load balancer service will go down; DETACHED means that the load balancer service has no attachment setting and is not instantiated in any transport nodes; DISABLED means that admin state of load balancer service is DISABLED; UNKNOWN means that no status reported from transport-nodes.The load balancer service may be working(or not working). # noqa: E501
:param service_status: The service_status of this LbServiceStatus. # noqa: E501
:type: str
"""
allowed_values = ["UP", "DOWN", "ERROR", "NO_STANDBY", "DETACHED", "DISABLED", "UNKNOWN"] # noqa: E501
if service_status not in allowed_values:
raise ValueError(
"Invalid value for `service_status` ({0}), must be one of {1}" # noqa: E501
.format(service_status, allowed_values)
)
self._service_status = service_status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LbServiceStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"tcraft@pivotal.io"
] | tcraft@pivotal.io |
fd0d07b0637a2c64fcfc0b85df0f40ca42457059 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_15695.py | b7ea853e6f7881ca090ce7e50b8f64f6d852da95 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | # calculate 95 percentile of the list values in python
import numpy as np
for i in finalvalues.values():
print np.percentile(map(int,i),95)
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
ca4b84868e25eb0cd3e66a89dca671d1d81c0945 | 36ab3aef58c145dfb68e027c0736bbe95b6bb2f3 | /tda_mvc/view/main.py | 62a022b8860f77d02c9e3383e9ee4505606a5311 | [
"MIT"
] | permissive | jjjkkkjjj/cldvis-txtmodifier | 658fb468610c33b9ccb370dd4b532fbd686aa56b | 647c57cdba325d54229b3300f7d0b18baa7b0b74 | refs/heads/master | 2023-04-18T21:59:18.598593 | 2021-05-02T07:41:06 | 2021-05-02T07:41:06 | 295,118,055 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,672 | py | from PySide2.QtWidgets import *
from PySide2.QtGui import *
from PySide2.QtCore import *
from ..utils.funcs import check_instance, create_action, add_actions
from ..utils.modes import ShowingMode
from ..model import Model
from .leftdock import LeftDockView
from .central import CentralView
from .rightdock import RightDockView
class MainView(QWidget):
def __init__(self, model, parent=None):
super().__init__(parent=parent)
self.model = check_instance('model', model, Model)
self.initUI()
def initUI(self):
self.leftdock = LeftDockView(self.model, self)
self.central = CentralView(self.model, self)
self.rightdock = RightDockView(self.model, self)
# create layout
hbox = QHBoxLayout()
# leftdock
hbox.addWidget(self.leftdock, 1)
# canvas as central widget
hbox.addWidget(self.central, 7)
# rightdock
hbox.addWidget(self.rightdock, 2)
self.setLayout(hbox)
class MenuBar(QMenuBar):
### Attributes ###
# menu
menu_file: QMenu
menu_edit: QMenu
menu_viewer: QMenu
menu_prediction: QMenu
menu_help: QMenu
# action
# file menu
action_openfolder: QAction
action_openfiles: QAction
action_savetda: QAction
action_saveastda: QAction
action_loadtda: QAction
action_backfile: QAction
action_forwardfile: QAction
action_exportCSV: QAction
action_exportDataset: QAction
action_exit: QAction
# edit menu
action_undo: QAction
action_redo: QAction
# view menu
action_zoomin: QAction
action_zoomout: QAction
action_showentire: QAction
action_showselected: QAction
# prediction menu
action_predict: QAction
action_done: QAction
action_discard: QAction
action_areaRectMode: QAction
action_areaQuadMode: QAction
action_predictImageMode: QAction
action_predictDocumentMode: QAction
# help menu
action_about: QAction
action_preferences: QAction
model: Model
def __init__(self, model: Model, parent=None):
super().__init__(parent=parent)
self.model = check_instance('model', model, Model)
self.initUI()
self.updateUI()
self.updateLanguage()
def initUI(self):
self.menu_file = self.addMenu('&File')
self.menu_edit = self.addMenu('&Edit')
self.menu_viewer = self.addMenu('&View')
self.menu_prediction = self.addMenu('&Prediction')
self.menu_help = self.addMenu('&Help')
##### File #####
# open folder
self.action_openfolder = create_action(self, "&Open Folder", slot=None,
shortcut="Ctrl+O", tip="Open a folder")
# open files
self.action_openfiles = create_action(self, "&Open Files", slot=None,
shortcut="Ctrl+Shift+O", tip="Open files")
# save
self.action_savetda = create_action(self, "&Save", slot=None,
shortcut="Ctrl+S", tip="Save a tda file in dataset directory")
# save as tda
self.action_saveastda = create_action(self, "&Save as tda", slot=None,
shortcut="Ctrl+Shift+S", tip="Save a tda file in specific location")
# load
self.action_loadtda = create_action(self, "&Load tda", slot=None,
shortcut="Ctrl+l", tip="Load a tda file from specific location")
# back file
self.action_backfile = create_action(self, "&Back File", slot=None,
shortcut="Alt+Left", tip="Back to a previous file")
# forward file
self.action_forwardfile = create_action(self, "&Forward File", slot=None,
shortcut="Alt+Right", tip="Forward to a next file")
# export csv
self.action_exportCSV = create_action(self, "&Export File", slot=None,
shortcut="Ctrl+E", tip="Export a file for table to specific location")
# export dataset
self.action_exportDataset = create_action(self, "&Export Dataset", slot=None,
shortcut="Ctrl+Shift+E", tip="Export a file for dataset to specific location")
# exit
self.action_exit = create_action(self, "&Exit", slot=None,
shortcut="Ctrl+Q", tip="Quit this application")
add_actions(self.menu_file, (self.action_openfolder, self.action_openfiles, None,
self.action_savetda, self.action_saveastda, self.action_loadtda, None,
self.action_backfile, self.action_forwardfile, None,
self.action_exportCSV, self.action_exportDataset, None,
self.action_exit))
##### Edit #####
# undo
self.action_undo = create_action(self, '&Undo', slot=None,
shortcut='Ctrl+Z', tip='Undo')
# redo
self.action_redo = create_action(self, '&Redo', slot=None,
shortcut='Ctrl+Y', tip='Redo')
add_actions(self.menu_edit, (self.action_undo, self.action_redo, None))
##### View #####
# zoom in
self.action_zoomin = create_action(self, '&Zoom In', slot=None,
shortcut="Ctrl++", tip='Zoom in')
# zoom out
self.action_zoomout = create_action(self, '&Zoom Out', slot=None,
shortcut="Ctrl+-", tip='Zoom out')
# show entire image
self.action_showentire = create_action(self, '&Show Entire Image', slot=None,
shortcut='Ctrl+Alt+E', tip='Show the entire image')
# show selected image
self.action_showselected = create_action(self, '&Show Selected Image', slot=None,
shortcut='Ctrl+Alt+R', tip='Show the selected image')
add_actions(self.menu_viewer, (self.action_zoomin, self.action_zoomout, None,
self.action_showentire, self.action_showselected))
##### Run #####
# predict
self.action_predict = create_action(self, '&Predict Table', slot=None,
shortcut="Ctrl+P", tip='Predict texts through google cloud vision API')
# done
self.action_done = create_action(self, '&Done', slot=None,
shortcut="Ctrl+G", tip='Finish editing. Auto save will be done and '
'default file name will be renamed by appending the sequence number')
# remove
self.action_discard = create_action(self, '&Discard', slot=None,
shortcut="Ctrl+D", tip='Discard the rectangle/quadrangle in selection mode\n'
'Discard the results in editing annotation mode')
# area mode
self.action_areaRectMode = create_action(self, '&Rectangle Mode', slot=None,
shortcut="r", tip='Use rectangle for prediction')
self.action_areaQuadMode = create_action(self, '&Quadrangle Mode', slot=None,
shortcut="q", tip='Use quadrangle for prediction')
# predict as image mode
self.action_predictImageMode = create_action(self, '&Image mode', slot=None,
shortcut='Ctrl+shift+I', tip='Predict texts as Image mode')
# predict as table mode
self.action_predictDocumentMode = create_action(self, '&Document mode', slot=None,
shortcut='Ctrl+shift+D', tip='Predict texts as Document mode')
add_actions(self.menu_prediction, (self.action_predict, None, self.action_done, self.action_discard, None,
self.action_areaRectMode, self.action_areaQuadMode, None,
self.action_predictImageMode, self.action_predictDocumentMode))
##### Help #####
# about
self.action_about = create_action(self, '&About', slot=None,
tip='About Table Data Analyzer')
# preferences
self.action_preferences = create_action(self, '&Preferences', slot=None,
shortcut="Ctrl+,", tip='Open preferences')
add_actions(self.menu_help, (self.action_about, None, self.action_preferences))
def updateUI(self):
"""
Check if each part is enabled or not
Returns
-------
"""
self.action_predict.setEnabled(self.model.isPredictable and not self.model.isPredicted)
self.action_done.setEnabled(self.model.isPredicted)
self.action_discard.setEnabled(self.model.isExistArea)
self.updateFile()
self.updateEdit()
self.updateViewer()
self.updatePrediction()
def updateFile(self):
# save
self.action_savetda.setEnabled(self.model.isPredicted)
self.action_saveastda.setEnabled(self.model.isPredicted)
self.action_loadtda.setEnabled(self.model.isExistImg)
# open file
self.action_backfile.setEnabled(self.model.isExistBackImg)
self.action_forwardfile.setEnabled(self.model.isExistForwardImg)
# export
self.action_exportCSV.setEnabled(self.model.isPredicted)
self.action_exportDataset.setEnabled(self.model.isPredicted)
def updateEdit(self):
self.action_undo.setEnabled((self.model.isUndoable and not self.model.isPredicted) or \
(self.model.annotations.isUndoable and self.model.isPredicted))
self.action_redo.setEnabled((self.model.isRedoable and not self.model.isPredicted) or \
(self.model.annotations.isRedoable and self.model.isPredicted))
def updateViewer(self):
self.action_zoomin.setEnabled(self.model.isExistImg and self.model.isZoomInable)
self.action_zoomout.setEnabled(self.model.isExistImg and self.model.isZoomOutable)
self.action_showselected.setEnabled(self.model.isPredictable)
def updatePrediction(self):
# below is replaced into
# `not (self.model.showingmode == ShowingMode.SELECTED and not self.model.isRectPredictable)`
# due to De Morgan's laws
self.action_areaRectMode.setEnabled((self.model.showingmode == ShowingMode.ENTIRE or self.model.isRectPredictable)
and (not self.model.isPredicted))
self.action_areaQuadMode.setEnabled((self.model.showingmode == ShowingMode.ENTIRE or self.model.isQuadPredictable)
and (not self.model.isPredicted))
self.action_predictImageMode.setEnabled(not self.model.isPredicted)
self.action_predictDocumentMode.setEnabled(not self.model.isPredicted)
def updateLanguage(self):
language = self.model.language
# menu
self.menu_file.setTitle(language.menu_file)
self.menu_edit.setTitle(language.menu_edit)
self.menu_viewer.setTitle(language.menu_viewer)
self.menu_prediction.setTitle(language.menu_prediction)
self.menu_help.setTitle(language.menu_help)
# action
# file menu
self.action_openfolder.setText(language.menu_action_openfolder)
self.action_openfiles.setText(language.menu_action_openfiles)
self.action_savetda.setText(language.menu_action_savetda)
self.action_saveastda.setText(language.menu_action_saveastda)
self.action_loadtda.setText(language.menu_action_loadtda)
self.action_backfile.setText(language.menu_action_backfile)
self.action_forwardfile.setText(language.menu_action_forwardfile)
self.action_exportCSV.setText(language.menu_action_exportCSV)
self.action_exportDataset.setText(language.menu_action_exportDataset)
self.action_exit.setText(language.menu_action_exit)
# edit menu
self.action_undo.setText(language.menu_action_undo)
self.action_redo.setText(language.menu_action_redo)
# view menu
self.action_zoomin.setText(language.menu_action_zoomin)
self.action_zoomout.setText(language.menu_action_zoomout)
self.action_showentire.setText(language.menu_action_showentire)
self.action_showselected.setText(language.menu_action_showselected)
# prediction menu
self.action_predict.setText(language.menu_action_predict)
self.action_done.setText(language.menu_action_done)
self.action_discard.setText(language.menu_action_discard)
self.action_areaRectMode.setText(language.menu_action_areaRectMode)
self.action_areaQuadMode.setText(language.menu_action_areaQuadMode)
self.action_predictImageMode.setText(language.menu_action_predictImageMode)
self.action_predictDocumentMode.setText(language.menu_action_predictDocumentMode)
# help menu
self.action_about.setText(language.menu_action_about)
self.action_preferences.setText(language.menu_action_preferences)
| [
"kado_89_ssp_jun@yahoo.co.jp"
] | kado_89_ssp_jun@yahoo.co.jp |
757105536cd00672a2b78440891188cce80724ea | 77e27a3793d66ae18c3933f1bb1f2e091dfd2890 | /session0/answers.py | 709ffd7e2cbd2023e8ea7a0284d4f3899cf989eb | [
"BSD-2-Clause"
] | permissive | jimmysong/pb-exercises | 44504b2727fdaecedf2102197962239dc695b7ea | 95e8046c930fc044c612484b58e0dca5136660ba | refs/heads/master | 2023-07-19T21:28:19.409469 | 2023-07-17T00:59:45 | 2023-07-17T00:59:45 | 102,324,750 | 166 | 91 | BSD-2-Clause | 2022-02-21T23:31:13 | 2017-09-04T06:08:41 | Python | UTF-8 | Python | false | false | 8,706 | py | """
#code
>>> import helper
#endcode
#markdown
### This is a Jupyter Notebook
You can write Python code and it will execute. You can write the typical 'hello world' program like this:
```python
print('hello world')
```
You can execute by pressing shift-enter. Try it! You can also click the Run button in the toolbar.
#endmarkdown
#code
>>> print('hello world')
hello world
#endcode
#exercise
You can do a lot more than just print "hello world"
This is a fully functioning Python 3 interpreter so you can write functions and objects like in the next box.
Try printing the 21st Fibonacci number below instead of the 11th. You can add caching if you want to practice coding in Python.
---
>>> def fib(n):
... if n in (0,1):
... return 1
... else:
... return fib(n-1) + fib(n-2)
>>> print(fib(20)) #/print(fib(10)) # CHANGE THIS LINE
10946
#endexercise
#markdown
### A few things you should remember in Python 3
Strings and bytes are now different
```python
s = 'hello world'
b = b'hello world'
```
These may look the same but the 'b' prefix means that the variable `b` is bytes whereas the variable `s` is a string. Basically, the on-disk characters on the system are bytes and the actual symbols in unicode are strings. A good explanation of the difference is [here](http://www.diveintopython3.net/strings.html).
#endmarkdown
#code
>>> s = 'hello world'
>>> b = b'hello world'
>>>
>>> print(s==b) # False
False
>>>
>>> # You convert from string to bytes this way:
>>>
>>> hello_world_bytes = s.encode('ascii')
>>> print(hello_world_bytes == b) # True
True
>>>
>>> # You convert from bytes to string this way:
>>>
>>> hello_world_string = b.decode('ascii')
>>> print(hello_world_string == s) # True
True
#endcode
#markdown
### Imports
You already have unit tests that are written for you.
Your task is to make them pass.
We can import various modules to make our experience using Jupyter more pleasant.
This way, making everything work will be a lot easier.
#endmarkdown
#code
>>> # this is how you import an entire module
>>> import helper
>>>
>>> # this is how you import a particular function, class or constant
>>> from helper import little_endian_to_int
>>>
>>> # used in the next exercise
>>> some_long_variable_name = 'something'
#endcode
#exercise
#### Jupyter Tips
The two most useful commands are tab and shift-tab
Tab lets you tab-complete. Try pressing tab after the `some` below. This will complete to the variable name that's there from the last cell.
Shift-Tab gives you a function/method signature. Try pressing shift-tab after the `little_endian_to_int` below. That's also there from the last cell.
---
>>> some_long_variable_name #/some # press *tab* here
'something'
>>> little_endian_to_int(b'\\x00') #/little_endian_to_int() # press shift-tab here
0
#endexercise
#unittest
helper:HelperTest:test_bytes:
Open [helper.py](/edit/session0/helper.py) and implement the `bytes_to_str` and `str_to_bytes` functions. Once you're done editing, run the cell below.
#endunittest
#markdown
### Getting Help
If you can't get this, there's a [complete directory](/tree/session0/complete) that has the [helper.py file](/edit/session0/complete/helper.py) and the [session0.ipynb file](/notebooks/session0/complete/session0.ipynb) which you can use to get the answers.
#endmarkdown
#markdown
### Useful Python 3 Idioms
You can reverse a list by using `[::-1]`:
```python
a = [1, 2, 3, 4, 5]
print(a[::-1]) # [5, 4, 3, 2, 1]
```
Also works on both strings and bytes:
```python
s = 'hello world'
print(s[::-1]) # 'dlrow olleh'
b = b'hello world'
print(b[::-1]) # b'dlrow olleh'
```
Indexing bytes will get you the numerical value:
```python
print(b'&'[0]) # 38 since & is character #38
```
You can do the reverse by using bytes:
```python
print(bytes([38])) # b'&'
```
#endmarkdown
#code
>>> a = [1, 2, 3, 4, 5]
>>> print(a[::-1]) # [5, 4, 3, 2, 1]
[5, 4, 3, 2, 1]
>>>
>>> s = 'hello world'
>>> print(s[::-1]) # 'dlrow olleh'
dlrow olleh
>>> b = b'hello world'
>>> print(b[::-1]) # b'dlrow olleh'
b'dlrow olleh'
>>>
>>> print(b'&'[0]) # 38 since & is character #38
38
>>>
>>> print(bytes([38])) # b'&'
b'&'
#endcode
#markdown
### Python Tricks
Here is how we convert binary to/from hex:
#endmarkdown
#code
>>> print(b'hello world'.hex())
68656c6c6f20776f726c64
>>> print(bytes.fromhex('68656c6c6f20776f726c64'))
b'hello world'
#endcode
#exercise
Reverse this hex dump: `b010a49c82b4bc84cc1dfd6e09b2b8114d016041efaf591eca88959e327dd29a`
Hint: you'll want to turn this into binary data, reverse and turn it into hex again
---
>>> h = 'b010a49c82b4bc84cc1dfd6e09b2b8114d016041efaf591eca88959e327dd29a'
>>> # convert to binary (bytes.fromhex)
>>> b = bytes.fromhex(h) #/
>>> # reverse ([::-1])
>>> b_rev = b[::-1] #/
>>> # convert to hex()
>>> h_rev = b_rev.hex() #/
>>> # print the result
>>> print(h_rev) #/
9ad27d329e9588ca1e59afef4160014d11b8b2096efd1dcc84bcb4829ca410b0
#endexercise
#markdown
### Modular Arithmetic
If you don't remember Modular Arithmetic, it's this function on python
```python
39 % 12
```
The result is 3 because that is the remainder after division (39 / 12 == 3 + 3/12).
Some people like to call it "wrap-around" math. If it helps, think of modular arithmetic like a clock:

Think of taking the modulo as asking the question "what hour will it be 39 hours from now?"
If you're still confused, please take a look at [this](https://www.khanacademy.org/computing/computer-science/cryptography/modarithmetic/a/what-is-modular-arithmetic) article.
#endmarkdown
#code
>>> print(39 % 12)
3
#endcode
#exercise
Find the modulo 19 of these numbers:
* 99
* \\(456 \cdot 444\\)
* \\(9^{77}\\)
(note python uses ** to do exponentiation)
---
>>> prime = 19
>>> print(99 % prime) #/
4
>>> print(456*444 % prime) #/
0
>>> print(9**77 % prime) #/
16
#endexercise
#markdown
### Converting from bytes to int and back
Converting from bytes to integer requires learning about Big and Little Endian encoding. Essentially any number greater than 255 can be encoded in two ways, with the "Big End" going first or the "Little End" going first.
Normal human reading is from the "Big End". For example 123 is read as 100 + 20 + 3. Some computer systems encode integers with the "Little End" first.
A number like 500 is encoded this way in Big Endian:
0x01f4 (256 + 244)
But this way in Little Endian:
0xf401 (244 + 256)
In Python we can convert an integer to big or little endian using a built-in method:
```python
n = 1234567890
big_endian = n.to_bytes(4, 'big') # b'\x49\x96\x02\xd2'
little_endian = n.to_bytes(4, 'little') # b'\xd2\x02\x96\x49'
```
We can also convert from bytes to an integer this way:
```python
big_endian = b'\x49\x96\x02\xd2'
n = int.from_bytes(big_endian, 'big') # 1234567890
little_endian = b'\xd2\x02\x96\x49'
n = int.from_bytes(little_endian, 'little') # 1234567890
```
#endmarkdown
#code
>>> n = 1234567890
>>> big_endian = n.to_bytes(4, 'big')
>>> little_endian = n.to_bytes(4, 'little')
>>>
>>> print(big_endian.hex())
499602d2
>>> print(little_endian.hex())
d2029649
>>>
>>> print(int.from_bytes(big_endian, 'big'))
1234567890
>>> print(int.from_bytes(little_endian, 'little'))
1234567890
#endcode
#exercise
Convert the following:
* 8675309 to 8 bytes in big endian
* interpret ```b'\x11\x22\x33\x44\x55'``` as a little endian integer
---
>>> n = 8675309
>>> print(n.to_bytes(8, 'big')) #/
b'\\x00\\x00\\x00\\x00\\x00\\x84_\\xed'
>>> little_endian = b'\x11\x22\x33\x44\x55'
>>> print(int.from_bytes(little_endian, 'little')) #/
366216421905
#endexercise
#unittest
helper:HelperTest:test_little_endian_to_int:
We'll want to convert from little-endian bytes to an integer often, so write a function that will do this.
#endunittest
#unittest
helper:HelperTest:test_int_to_little_endian:
Similarly, we'll want to do the inverse operation, so write a function that will convert an integer to little-endian bytes given the number and the number of bytes it should take up.
#endunittest
"""
from unittest import TestCase
import helper
def bytes_to_str(b, encoding="ascii"):
    """Decode the byte string *b* into text using *encoding* (ASCII by default)."""
    return str(b, encoding)
def str_to_bytes(s, encoding="ascii"):
    """Encode the text string *s* into bytes using *encoding* (ASCII by default)."""
    return bytes(s, encoding)
def little_endian_to_int(b):
    """Interpret the byte string *b* as a little-endian unsigned integer."""
    return int.from_bytes(b, byteorder="little")
def int_to_little_endian(n, length):
    """Encode the integer *n* as exactly *length* bytes, little-endian."""
    return n.to_bytes(length, byteorder="little")
class SessionTest(TestCase):
    """Publishes this session's answers onto the shared ``helper`` module."""

    def test_apply(self):
        # Later sessions import these names from helper, so install each
        # implementation under its own name on that module.
        for func in (bytes_to_str, str_to_bytes,
                     little_endian_to_int, int_to_little_endian):
            setattr(helper, func.__name__, func)
| [
"jaejoon@gmail.com"
] | jaejoon@gmail.com |
f5c59364b1ad428e4c9a85d5771d8f42799e1390 | e562f7e0a51273475e50a7e61d1d377f88775622 | /WebMirror/SpecialCase.py | 5cc8b15a5803bf4e34658c351f8014916ff5a21f | [] | no_license | bloodcurdle/ReadableWebProxy | d1c6ae0220fdb04ea7ab82963c86e776a0dbbfd9 | 10f68f913a78f8b0e47582996d9860a61da55dd6 | refs/heads/master | 2021-05-29T19:58:32.965610 | 2015-11-09T18:25:00 | 2015-11-09T18:25:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,427 | py |
import logging
import datetime
import random
import WebMirror.database as db
import multiprocessing
import time
random.seed()
# import WebMirror.rules
# import WebMirror.LogBase as LogBase
# import runStatus
# import time
# import os.path
# import os
# import sys
# import sqlalchemy.exc
# from sqlalchemy import desc
# from sqlalchemy.sql import text
# from sqlalchemy import distinct
# from sqlalchemy.dialects import postgresql
# import WebMirror.util.urlFuncs
# import urllib.parse
# import traceback
# import datetime
# from sqlalchemy.sql import text
# from sqlalchemy.sql import func
# import WebMirror.util.webFunctions as webFunctions
# import hashlib
# from WebMirror.Fetch import DownloadException
# import WebMirror.Fetch
# import WebMirror.database as db
# from config import C_RESOURCE_DIR
# from activePlugins import INIT_CALLS
# if "debug" in sys.argv:
# CACHE_DURATION = 1
# RSC_CACHE_DURATION = 1
# # CACHE_DURATION = 60 * 5
# # RSC_CACHE_DURATION = 60 * 60 * 5
# else:
# CACHE_DURATION = 60 * 60 * 24 * 7
# RSC_CACHE_DURATION = 60 * 60 * 24 * 147
# Per-netloc count of fetches currently in flight.  Keys are added lazily by
# handleRateLimiting the first time a domain is seen.
ACTIVE_FETCHES = {
	# Populated at runtime
}
# Guards all reads/writes of ACTIVE_FETCHES across worker processes.
FETCH_LOCK = multiprocessing.Lock()
# Module-level logger for the special-case dispatch machinery.
log = logging.getLogger("Main.Web.SpecialCaseHandler")
def handleRemoteFetch(params, job, engine):
    """Placeholder for the 'remote_fetch' special-case command (not yet implemented)."""
    print("Remote fetch command!")
def handleRateLimiting(params, job, engine):
    """Throttle concurrent fetches for a single domain.

    params  : sequence whose first element is the maximum number of
              concurrent fetchers allowed for ``job.netloc``.
    job     : the fetch job; read for ``netloc`` and, when deferred,
              ``ignoreuntiltime`` is written back and committed.
    engine  : the fetch engine; ``engine.do_job(job)`` performs the fetch.

    If too many fetchers are already active for the domain, the job is
    pushed 5-10 minutes into the future (randomized to spread retries)
    instead of being run now.
    """
    allowable = params[0]
    with FETCH_LOCK:
        # Read (and lazily initialize) the counter *under the lock* — the
        # original read it after releasing the lock, racing other workers.
        active = ACTIVE_FETCHES.setdefault(job.netloc, 0)
    log.info("Active fetchers for domain %s - %s", job.netloc, active)
    if active > allowable:
        log.info("Too many instances of fetchers for domain %s active. Forcing requests to sleep for a while", job.netloc)
        job.ignoreuntiltime = datetime.datetime.now() + datetime.timedelta(seconds=60*5 + random.randrange(0, 60*5))
        db.get_session().commit()
        return
    with FETCH_LOCK:
        ACTIVE_FETCHES[job.netloc] += 1
    try:
        engine.do_job(job)
        time.sleep(5)
    finally:
        # Always release the slot, even if do_job raised — the original
        # leaked the counter on exceptions, eventually starving the domain.
        with FETCH_LOCK:
            ACTIVE_FETCHES[job.netloc] -= 1
# Maps the special-case command name (first element of a rules entry) to the
# handler that implements it.  handleSpecialCase() looks commands up here.
dispatchers = {
	'remote_fetch' : handleRemoteFetch,
	'rate_limit'   : handleRateLimiting,
}
def handleSpecialCase(job, engine, rules):
    """Dispatch *job* to the special-case handler configured for its netloc.

    rules maps a netloc to a command list ``[op, param1, param2, ...]``;
    ``op`` selects a handler from the module-level ``dispatchers`` table and
    the remaining elements are passed through as that handler's params.
    Raises KeyError if ``job.netloc`` has no entry in *rules*.
    """
    commands = rules[job.netloc]
    op, params = commands[0], commands[1:]
    if op in dispatchers:
        dispatchers[op](params, job, engine)
    else:
        log.error("Error! Unknown special-case filter!")
        # Original used print() with %s placeholders but never interpolated
        # them; use the logger's lazy %-formatting instead.
        log.error("Filter name: '%s', parameters: '%s', job URL: '%s'", op, params, job.url)
| [
"something@fake-url.com"
] | something@fake-url.com |
75442cee4229c57d333ef72e9ab9a0ece37171d7 | bffcbc372a73aa386d20e50cf85f6943bdb640fe | /image_processing_lab2/read_grey_image_general_kernel_v2.py | 1b060512bb69294623a1789bd823b90e80d3e49d | [] | no_license | liketheflower/img_processing_csc475 | 5852cefe411c3b0b55b7383c697727e450930594 | d50b1b65e5bdd509065d6eb7bbda3db23b9a9005 | refs/heads/master | 2021-05-04T13:43:02.928721 | 2018-05-07T15:27:41 | 2018-05-07T15:27:41 | 120,321,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,510 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def crop(start, end, origin_image):
    """Return the sub-image from *start* (row, col) to *end* (row, col), inclusive."""
    (row0, col0), (row1, col1) = start, end
    return origin_image[row0:row1 + 1, col0:col1 + 1]
def show_img(img):
    # Display *img* as a grayscale matplotlib figure; blocks until the
    # window is closed (plt.show()).
    plt.figure()
    plt.imshow(img, cmap='gray', aspect='auto')
    plt.show()
def mean_filter_new(img):
    """Convolve a 2-D grayscale image with a fixed 3x3 kernel.

    Despite the name, the effective kernel is the Sobel row(x)-direction
    edge detector: the original code assigned `filter_` four times in a row
    (mean, gaussian, sobel-x, with sobel-y commented out), so only the last
    assignment ever took effect.  The dead assignments are kept below as
    comments for reference.

    Border pixels, where the 3x3 window would fall outside the image, are
    left at 0.  Returns a new float array of the same shape as *img*.
    """
    # mean:     [[1/9.]*3]*3
    # gaussian: [[1, 2, 1], [2, 4, 2], [1, 2, 1]] scaled by 1/16
    # sobel-y:  [[-1, -2, -1], [0, 0, 0], [1, 2, 1]]
    kernel = [[-1, 0, 1],
              [-2, 0, 2],
              [-1, 0, 1]]  # sobel filter, row (x) direction
    new_img = np.zeros(img.shape)
    rows, cols = new_img.shape
    pad = 1
    # Loop only over the interior instead of testing the boundary condition
    # on every pixel as the original did (same output, less work per pixel).
    for i in range(pad, rows - pad):
        for j in range(pad, cols - pad):
            window = img[i - pad:i + pad + 1, j - pad:j + pad + 1]
            acc = 0
            for ii in range(3):
                for jj in range(3):
                    acc += window[ii, jj] * kernel[ii][jj]
            new_img[i, j] = acc
    return new_img
def mean_filter(filter_size, img):
    """Box (mean) filter *img* with a square window of odd side *filter_size*.

    Border pixels, where the window would fall outside the image, are left
    at 0.  Returns a new float array of the same shape as *img*.

    Fixes relative to the original:
    - removed the stray debug ``print type(R), type(C)`` statement (Python 2
      print-statement syntax, a SyntaxError under Python 3);
    - used explicit floor division ``//`` for the half-window size (the
      original ``filter_size/2`` only worked under Python 2 semantics).
    """
    new_img = np.zeros(img.shape)
    rows, cols = new_img.shape
    half = filter_size // 2
    for i in range(half, rows - half):
        for j in range(half, cols - half):
            window = img[i - half:i + half + 1, j - half:j + half + 1]
            new_img[i, j] = np.average(window)
    return new_img
# --- Demo script (Python 2: note the bare `print` statements below) ---
# Load a grayscale test image; alternate inputs are left commented out.
file_name = "lena_gray.bmp"
#file_name ="zelda2.bmp"
#file_name = "./../../DM/ID13_YANGJUYING_OD01/Intensity/ID13_YANGJUYING_OD01_0.bmp"
img = mpimg.imread(file_name)
#img = np.zeros((200,200))
#tem_img = np.ones((100,200))
#img[:100,:] = tem_img
# Show the original image.
plt.figure()
plt.imshow(img, cmap='gray', aspect='auto')
plt.show()
filter_size = 3
#new_img = mean_filter(filter_size,img)
# NOTE(review): despite the plot title, mean_filter_new's effective kernel
# is a Sobel filter, not a mean filter — confirm which was intended.
new_img = mean_filter_new(img)
plt.figure()
plt.imshow(new_img, cmap='gray', aspect='auto')
plt.title("image after new mean filter with size of "+str(filter_size))
plt.show()
# Crop and display two regions of the original image.
start =(100,100)
end =(300,300)
croped_image = crop(start, end, img)
print croped_image
show_img(croped_image)
croped_image = crop((100,100), (110,110), img)
print croped_image
show_img(croped_image)
| [
"jim.morris.shen@gmail.com"
] | jim.morris.shen@gmail.com |
45e24bd2f0b4c25cfc4070133cbbbf1078100f86 | a5a682cfcb90660b35806b895b7c6e8dfa9914c2 | /h/streamer/test/streamer_test.py | 790f76a40822f2510c40638715ceee05cbf1973d | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause",
"BSD-2-Clause-Views"
] | permissive | jazahn/h | 507ead458bb16134b79c270110ff72b7a8bfd600 | 8cfd1012d4ee41a21d1ad81427961ac2ac59464f | refs/heads/master | 2021-01-15T14:49:51.006483 | 2016-04-15T18:14:53 | 2016-04-15T18:14:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,534 | py | # -*- coding: utf-8 -*-
import mock
from mock import call
import pytest
from h.streamer import nsq
from h.streamer import streamer
from h.streamer import websocket
def test_process_work_queue_sends_nsq_messages_to_nsq_handle_message(session):
    """An NSQ message on the work queue is routed to nsq.handle_message."""
    msg = nsq.Message(topic='foo', payload='bar')

    streamer.process_work_queue({}, [msg], session_factory=lambda: session)

    nsq.handle_message.assert_called_once_with(msg,
                                               topic_handlers=mock.ANY)
def test_process_work_queue_uses_appropriate_topic_handlers_for_nsq_messages(session):
    """The nsq.namespace setting is used to build the topic-handler mapping."""
    msg = nsq.Message(topic='foo', payload='bar')
    settings = {'nsq.namespace': 'wibble'}

    streamer.process_work_queue(settings, [msg], session_factory=lambda: session)

    expected_handlers = {
        'wibble-annotations': nsq.handle_annotation_event,
        'wibble-user': nsq.handle_user_event,
    }
    nsq.handle_message.assert_called_once_with(mock.ANY,
                                               topic_handlers=expected_handlers)
def test_process_work_queue_sends_websocket_messages_to_websocket_handle_message(session):
    """A websocket message on the work queue is routed to websocket.handle_message."""
    msg = websocket.Message(socket=mock.sentinel.SOCKET, payload='bar')

    streamer.process_work_queue({}, [msg], session_factory=lambda: session)

    websocket.handle_message.assert_called_once_with(msg)
def test_process_work_queue_commits_after_each_message(session):
    """Each successfully handled message triggers its own session commit."""
    work = [
        websocket.Message(socket=mock.sentinel.SOCKET, payload='bar'),
        nsq.Message(topic='foo', payload='bar'),
    ]

    streamer.process_work_queue({}, work, session_factory=lambda: session)

    assert session.commit.call_count == 2
def test_process_work_queue_rolls_back_on_handler_exception(session):
    """A handler exception rolls the transaction back instead of committing."""
    nsq.handle_message.side_effect = RuntimeError('explosion')

    streamer.process_work_queue({},
                                [nsq.Message(topic='foo', payload='bar')],
                                session_factory=lambda: session)

    session.commit.assert_not_called()
    session.rollback.assert_called_once_with()
def test_process_work_queue_rolls_back_on_unknown_message_type(session):
    """Objects that are not recognised messages cause a rollback, not a commit."""
    streamer.process_work_queue({},
                                ['something that is not a message'],
                                session_factory=lambda: session)

    session.commit.assert_not_called()
    session.rollback.assert_called_once_with()
def test_process_work_queue_calls_close_after_commit(session):
    """The session is closed immediately after a successful commit."""
    streamer.process_work_queue({},
                                [nsq.Message(topic='foo', payload='bar')],
                                session_factory=lambda: session)

    assert session.method_calls[-2:] == [call.commit(), call.close()]
def test_process_work_queue_calls_close_after_rollback(session):
    """The session is closed immediately after a rollback."""
    nsq.handle_message.side_effect = RuntimeError('explosion')

    streamer.process_work_queue({},
                                [nsq.Message(topic='foo', payload='bar')],
                                session_factory=lambda: session)

    assert session.method_calls[-2:] == [call.rollback(), call.close()]
@pytest.fixture
def session():
    # Strict mock of a SQLAlchemy session: spec_set means only these four
    # methods may be called/configured; anything else raises AttributeError.
    return mock.Mock(spec_set=['close', 'commit', 'execute', 'rollback'])
@pytest.fixture(autouse=True)
def nsq_handle_message(patch):
    # Automatically stub the real NSQ handler for every test in this module.
    return patch('h.streamer.nsq.handle_message')
@pytest.fixture(autouse=True)
def websocket_handle_message(patch):
    # Automatically stub the real websocket handler for every test in this module.
    return patch('h.streamer.websocket.handle_message')
| [
"nick@whiteink.com"
] | nick@whiteink.com |
79846055cce04d4328cacdf55f4e288981d93e9d | c9fe05f893deff75232aabca4e877c144972249a | /arcpyenv/arcgispro-py3-clone/Lib/site-packages/arcgis/gis/admin/_system.py | a47fe478733b0dfc6c309d2871724b37aac8794d | [
"Python-2.0"
] | permissive | SherbazHashmi/HackathonServer | 4d1dc7f0122a701a0f3a17787d32efe83bc67601 | a874fe7e5c95196e4de68db2da0e2a05eb70e5d8 | refs/heads/master | 2022-12-26T06:46:33.893749 | 2019-11-03T10:49:47 | 2019-11-03T10:49:47 | 218,912,149 | 3 | 3 | null | 2022-12-11T11:52:37 | 2019-11-01T04:16:38 | Python | UTF-8 | Python | false | false | 51,113 | py | """
Modifies a local portal's system settings.
"""
from ..._impl.connection import _ArcGISConnection
from .. import GIS
from ._base import BasePortalAdmin
from ..._impl.common._mixins import PropertyMap
########################################################################
class System(BasePortalAdmin):
    """
    This resource is an umbrella for a collection of system-wide resources
    for your portal. This resource provides access to the ArcGIS Web
    Adaptor configuration, portal directories, database management server,
    indexing capabilities, license information, and the properties of your
    portal.
    """
    # Class-level defaults; the real values are assigned in __init__.
    _gis = None
    _con = None
    _url = None
    #----------------------------------------------------------------------
    def __init__(self, url, gis=None, **kwargs):
        """Constructor

        url    : admin REST endpoint for the portal's system resource
        gis    : a GIS object or a raw _ArcGISConnection
        kwargs : may include ``initialize=True`` to eagerly load the
                 resource's properties from the server
        """
        super(System, self).__init__(url=url,
                                     gis=gis,
                                     **kwargs)
        initialize = kwargs.pop("initialize", False)
        if isinstance(gis, _ArcGISConnection):
            self._con = gis
        elif isinstance(gis, GIS):
            self._gis = gis
            self._con = gis._con
        else:
            raise ValueError(
                "connection must be of type GIS or _ArcGISConnection")
        if initialize:
            self._init(self._gis)
    #----------------------------------------------------------------------
    @property
    def properties(self):
        """
        Gets/Sets the system properties that have been modified to control
        the portal's environment. The list of available properties are:
        - privatePortalURL-Informs the portal that it has a front end
          load-balancer/proxy reachable at the URL. This property is
          typically used to set up a highly available portal configuration
        - portalLocalhostName-Informs the portal back-end to advertise the
          value of this property as the local portal machine. This is
          typically used during federation and when the portal machine has
          one or more public host names.
        - httpProxyHost-Specifies the HTTP hostname of the proxy server
        - httpProxyPort-Specifies the HTTP port number of the proxy server
        - httpProxyUser-Specifies the HTTP proxy server username.
        - httpProxyPassword-Specifies the HTTP proxy server password.
        - isHttpProxyPasswordEncrypted-Set this property to false when you
          are configuring the HTTP proxy server password in plain text.
          After configuration, the password will be encrypted and this
          property will be set to true
        - httpsProxyHost-Specifies the HTTPS hostname of the proxy server
        - httpsProxyPort-Specifies the HTTPS port number of the proxy
          server
        - httpsProxyUser-Specifies the HTTPS proxy server username
        - httpsProxyPassword-Specifies the HTTPS proxy server password
        - isHttpsProxyPasswordEncrypted-Set this property to false when
          you are configuring the HTTPS proxy server password in plain
          text. After configuration, the password will be encrypted and
          this property will be set to true.
        - nonProxyHosts-If you want to federate ArcGIS Server and the site
          does not require use of the forward proxy, list the server
          machine or site in the nonProxyHosts property. Machine and
          domain items are separated using a pipe (|).
        - WebContextURL-If you are using a reverse proxy, set this
          property to reverse proxy URL.
        - ldapCertificateValidation Introduced at 10.7. When set to true,
          any encrypted LDAP communication (LDAPS) made from the portal to
          the user or group identity store will enforce certificate
          validation. The default value is false.
        """
        url = "%s/properties" % self._url
        params = {"f" : "json"}
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    @properties.setter
    def properties(self, properties):
        """
        Gets/Sets the system properties that have been modified to control
        the portal's environment. The list of available properties are:
        - privatePortalURL-Informs the portal that it has a front end
          load-balancer/proxy reachable at the URL. This property is
          typically used to set up a highly available portal configuration
        - portalLocalhostName-Informs the portal back-end to advertise the
          value of this property as the local portal machine. This is
          typically used during federation and when the portal machine has
          one or more public host names.
        - httpProxyHost-Specifies the HTTP hostname of the proxy server
        - httpProxyPort-Specifies the HTTP port number of the proxy server
        - httpProxyUser-Specifies the HTTP proxy server username.
        - httpProxyPassword-Specifies the HTTP proxy server password.
        - isHttpProxyPasswordEncrypted-Set this property to false when you
          are configuring the HTTP proxy server password in plain text.
          After configuration, the password will be encrypted and this
          property will be set to true
        - httpsProxyHost-Specifies the HTTPS hostname of the proxy server
        - httpsProxyPort-Specifies the HTTPS port number of the proxy
          server
        - httpsProxyUser-Specifies the HTTPS proxy server username
        - httpsProxyPassword-Specifies the HTTPS proxy server password
        - isHttpsProxyPasswordEncrypted-Set this property to false when
          you are configuring the HTTPS proxy server password in plain
          text. After configuration, the password will be encrypted and
          this property will be set to true.
        - nonProxyHosts-If you want to federate ArcGIS Server and the site
          does not require use of the forward proxy, list the server
          machine or site in the nonProxyHosts property. Machine and
          domain items are separated using a pipe (|).
        - WebContextURL-If you are using a reverse proxy, set this
          property to reverse proxy URL.
        - ldapCertificateValidation-Introduced at 10.7. When set to true,
          any encrypted LDAP communication (LDAPS) made from the portal to
          the user or group identity store will enforce certificate
          validation. The default value is false.
        """
        url = "%s/properties/update" % self._url
        params = {"f" : "json",
                  "properties" : properties}
        # NOTE(review): this update is issued via GET while other update
        # endpoints in this class use POST — confirm this is intentional.
        self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    @property
    def web_adaptors(self):
        """
        The Web Adaptors resource lists the ArcGIS Web Adaptor configured
        with your portal. You can configure the Web Adaptor by using its
        configuration web page or the command line utility provided with
        the installation.
        """
        url = "%s/webadaptors" % self._url
        return WebAdaptors(url=url, gis=self._con)
    #----------------------------------------------------------------------
    @property
    def directories(self):
        """
        The directories resource is a collection of directories that are
        used by the portal to store and manage content. Beginning at
        10.2.1, Portal for ArcGIS supports five types of directories:
        - Content directory-The content directory contains the data
          associated with every item in the portal.
        - Database directory-The built-in security store and sharing
          rules are stored in a Database server that places files in the
          database directory.
        - Temporary directory - The temporary directory is used as a
          scratch workspace for all the portal's runtime components.
        - Index directory-The index directory contains all the indexes
          associated with the content in the portal. The indexes are used
          for quick retrieval of information and for querying purposes.
        - Logs directory-Errors and warnings are written to text files in
          the log file directory. Each day, if new errors or warnings are
          encountered, a new log file is created.
        If you would like to change the path for a directory, you can use
        the Edit Directory operation.
        """
        # Wrap each directory listed by the server in a Directory helper.
        res = []
        surl = "%s/directories" % self._url
        params = {'f' : "json"}
        for d in self._con.get(path=surl, params=params)['directories']:
            url = "%s/directories/%s" % (self._url, d['name'])
            res.append(Directory(url=url, gis=self._con))
        return res
    #----------------------------------------------------------------------
    @property
    def licenses(self):
        """
        Portal for ArcGIS requires a valid license to function correctly.
        This resource returns the current status of the license.
        Starting at 10.2.1, Portal for ArcGIS enforces the license by
        checking the number of registered members and comparing it with the
        maximum number of members authorized by the license. Contact Esri
        Customer Service if you have questions about license levels or
        expiration properties.
        """
        # The license endpoint moved at version 7.1 (ArcGIS Enterprise
        # 10.7.x): older portals expose /licenses, newer ones /license.
        if self._gis.version < [7,1]:
            url = "%s/licenses" % self._url
            return Licenses(url=url, gis=self._con)
        else:
            import os
            u = os.path.dirname(self._url)
            url = "%s/license" % u
            return PortalLicense(url=url, gis=self._con)
        # NOTE(review): unreachable — both branches above return.
        return None
    #----------------------------------------------------------------------
    @property
    def database(self):
        """
        The database resource represents the database management system
        (DBMS) that contains all of the portal's configuration and
        relationship rules. This resource also returns the name and version
        of the database server currently running in the portal.
        You can use the properety to update database accounts
        """
        url = "%s/database" % self._url
        params = {"f" : "json"}
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    @database.setter
    def database(self, value):
        """
        The database resource represents the database management system
        (DBMS) that contains all of the portal's configuration and
        relationship rules. This resource also returns the name and version
        of the database server currently running in the portal.
        You can use the properety to update database accounts
        """
        url = "%s/database" % self._url
        params = {"f" : "json"}
        # Pass every key/value of the supplied dict through to the endpoint.
        for k,v in value.items():
            params[k] = v
        self._con.post(path=url, postdata=params)
    #----------------------------------------------------------------------
    @property
    def index_status(self):
        """
        The status resource allows you to view the status of the indexing
        service. You can view the number of users, groups, and search items
        in both the database (store) and the index.
        If the database and index do not match, indexing is either in
        progress or there is a problem with the index. It is recommended
        that you reindex to correct any issues. If indexing is in progress,
        you can monitor the status by refreshing the page.
        :returns: dict
        .. code-block:: python
            USAGE: Prints out current Index Status
            from arcgis.gis import GIS
            gis = GIS("https://yourportal.com/portal", "portaladmin", "password")
            sysmgr = gis.admin.system
            idx_status = sysmgr.index_status
            import json
            print(json.dumps(idx_status, indent=2))
            # Output
            {
              "indexes": [
                {
                  "name": "users",
                  "databaseCount": 51,
                  "indexCount": 51
                },
                {
                  "name": "groups",
                  "databaseCount": 325,
                  "indexCount": 325
                },
                {
                  "name": "search",
                  "databaseCount": 8761,
                  "indexCount": 8761
                }
              ]
            }
        """
        url = "%s/indexer/status" % self._url
        params = {'f' : 'json'}
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    def reindex(self, mode='FULL', includes=None):
        """
        This operation allows you to generate or update the indexes for
        content; such as users, groups, and items stored in the database
        (store). During the process of upgrading an earlier version of
        Portal for ArcGIS, you are required to update the indexes by
        running this operation. You can check the status of your indexes
        using the status resource.
        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        mode                        Optional string. The mode in which the indexer should run.
                                    Values USER_MODE | GROUP_MODE | SEARCH_MODE | FULL
        --------------------------- --------------------------------------------------------------------
        includes                    Optional string. An optional comma separated list of elements to
                                    include in the index. This is useful if you want to only index
                                    certain items or user accounts.
        =========================== ====================================================================
        :returns: boolean
        """
        url = "%s/indexer/reindex" % self._url
        # Accept the documented 'FULL' shorthand; the REST API expects
        # the literal mode name 'FULL_MODE'.
        if mode.lower() == 'full':
            mode = "FULL_MODE"
        params = {
            "f" : "json",
            "mode" : mode
        }
        if includes:
            params['includes'] = includes
        res = self._con.post(path=url,
                             postdata=params)
        if 'status' in res:
            return res['status'] == 'success'
        return False
    #----------------------------------------------------------------------
    @property
    def languages(self):
        """
        This resource gets/sets which languages will appear in portal
        content search results. Use the Update languages operation to
        modify which language'content will be available.
        """
        url = "%s/languages" % self._url
        params = {"f" : "json"}
        return self._con.get(url,
                             params)
    #----------------------------------------------------------------------
    @languages.setter
    def languages(self, value):
        """
        This resource gets/sets which languages will appear in portal
        content search results. Use the Update languages operation to
        modify which language'content will be available.
        """
        url = "%s/languages/update" % self._url
        params = {"f" : "json",
                  'languages' : value}
        self._con.post(url,
                       params)
    #----------------------------------------------------------------------
    @property
    def content_discovery(self):
        """
        This resource allows an administrator to enable or disable external content discovery from the portal website.
        Because some Esri-provided content requires external access to the internet, an administrator may choose to disable the content to prevent requests to ArcGIS Online resources. When disabling the content, a select group of items will be disabled:
        - All basemaps owned by "esri_[lang]"
        - All content owned by "esri_nav"
        - All content owned by "esri"
        This resource will not disable ArcGIS Online utility services or Living Atlas content. For steps to disable these items, refer to the Portal Administrator guide.
        When external content is disabled, System Languages are also disabled.
        ================== ====================================================================
        **Argument**       **Description**
        ------------------ --------------------------------------------------------------------
        value              required Boolean. If true, external content is enabled, else it is
                           disabled.
        ================== ====================================================================
        :returns: boolean
        """
        url = "%s/content/configuration" % self._url
        params = {'f' : 'json'}
        res = self._con.get(url, params)
        # Only the enabled/disabled flag is surfaced to the caller.
        return res["isExternalContentEnabled"]
    #----------------------------------------------------------------------
    @content_discovery.setter
    def content_discovery(self, value):
        """
        This resource allows an administrator to enable or disable external content discovery from the portal website.
        Because some Esri-provided content requires external access to the internet, an administrator may choose to disable the content to prevent requests to ArcGIS Online resources. When disabling the content, a select group of items will be disabled:
        - All basemaps owned by "esri_[lang]"
        - All content owned by "esri_nav"
        - All content owned by "esri"
        This resource will not disable ArcGIS Online utility services or Living Atlas content. For steps to disable these items, refer to the Portal Administrator guide.
        When external content is disabled, System Languages are also disabled.
        ================== ====================================================================
        **Argument**       **Description**
        ------------------ --------------------------------------------------------------------
        value              required Boolean. If true, external content is enabled, else it is
                           disabled.
        ================== ====================================================================
        """
        import json
        url = "%s/content/configuration/update" % self._url
        # json.dumps renders the Python bool as lowercase true/false,
        # which is what the REST endpoint expects.
        params = {'f' : 'json',
                  'externalContentEnabled': json.dumps(value)}
        res = self._con.post(url, params)
########################################################################
class WebAdaptors(BasePortalAdmin):
    """
    The Web Adaptors resource lists the ArcGIS Web Adaptor configured with
    your portal. You can configure the Web Adaptor by using its
    configuration web page or the command line utility provided with the
    installation.
    """
    # Class-level defaults; the real values are assigned in __init__.
    _gis = None
    _con = None
    _url = None
    #----------------------------------------------------------------------
    def __init__(self, url, gis=None, **kwargs):
        """Constructor

        url    : admin REST endpoint for the webadaptors resource
        gis    : a GIS object or a raw _ArcGISConnection
        kwargs : may include ``initialize=True`` to eagerly load the
                 resource's properties from the server
        """
        super(WebAdaptors, self).__init__(url=url,
                                          gis=gis,
                                          **kwargs)
        initialize = kwargs.pop("initialize", False)
        if isinstance(gis, _ArcGISConnection):
            self._con = gis
        elif isinstance(gis, GIS):
            self._gis = gis
            self._con = gis._con
        else:
            raise ValueError(
                "connection must be of type GIS or _ArcGISConnection")
        if initialize:
            self._init(self._gis)
    #----------------------------------------------------------------------
    def list(self):
        """
        Returns all instances of WebAdaptors
        .. code-block:: python
            USAGE: Get all Web Adaptors and list keys,values of first Web Adaptor object
            from arcgis.gis import GIS
            gis = GIS("https://yourportal.com/portal", "portaladmin", "password")
            # Return a List of Web Adaptor objects
            webadaptors = gis.admin.system.web_adaptors.list()
            # Get the first Web Adaptor object and print out each of its values
            for key, value in dict(webadaptors[0]).items():
                print("{} : {}".format(key, value))
        :return:
            List of Web Adaptor objects. Typically, only 1 Web Adaptor will exist for a Portal
        """
        # Each entry under 'webAdaptors' is addressable by its id.
        res = []
        if 'webAdaptors' in self.properties:
            for wa in self.properties.webAdaptors:
                url = "%s/%s" % (self._url, wa['id'])
                res.append(WebAdaptor(url=url, gis=self._con))
        return res
    #----------------------------------------------------------------------
    @property
    def configuration(self):
        """
        Gets/Sets the common properties and configuration of the ArcGIS Web
        Adaptor configured with the portal.
        """
        url = "%s/config" % self._url
        params = {"f" : "json"}
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    @configuration.setter
    def configuration(self, shared_key):
        """
        Gets/Sets the common properties and configuration of the ArcGIS Web
        Adaptor configured with the portal.
        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        shared_key                  Required string. This property represents credentials that are shared
                                    with the Web Adaptor. The Web Adaptor uses these credentials to
                                    communicate with the portal
        =========================== ====================================================================
        """
        url = "%s/config/update" % self._url
        if isinstance(shared_key, str):
            # FIX: the REST property is camelCase 'sharedKey'; the original
            # built {'sharedkey': ...} here while the dict branch below
            # (and the API) expect 'sharedKey'.
            params = {
                "webAdaptorsConfig": {'sharedKey' : shared_key},
                "f" : "json"}
        elif isinstance(shared_key, dict) and \
             'sharedKey' in shared_key:
            params = {"webAdaptorsConfig": shared_key,
                      "f" : "json"}
        else:
            # The original fell through with `params` undefined, raising an
            # opaque NameError; fail fast with a clear message instead.
            raise ValueError("shared_key must be a string or a dict "
                             "containing a 'sharedKey' entry")
        return self._con.post(path=url, postdata=params)
########################################################################
class WebAdaptor(BasePortalAdmin):
    """
    The ArcGIS Web Adaptor is a web application that runs in a front-end
    web server. One of the Web Adaptor's primary responsibilities is to
    forward HTTP requests from end users to Portal for ArcGIS. The Web
    Adaptor acts a reverse proxy, providing the end users with an entry
    point to the system, hiding the back-end servers, and providing some
    degree of immunity from back-end failures.

    The front-end web server can authenticate incoming requests against
    your enterprise identity stores and provide specific authentication
    schemes such as Integrated Windows Authentication (IWA), HTTP Basic, or
    Digest.

    Most importantly, a Web Adaptor provides your end users with a well
    defined entry point to your system without exposing the internal
    details of your portal. Portal for ArcGIS will trust requests being
    forwarded by the Web Adaptor and will not challenge the user for any
    credentials. However, the authorization of the request (by looking up
    roles and permissions) is still enforced by the portal's sharing rules.
    """
    # Wrapper state: owning GIS (if any), active connection, REST endpoint.
    _gis = None
    _con = None
    _url = None
    #----------------------------------------------------------------------
    def __init__(self, url, gis=None, **kwargs):
        """
        Constructor.

        :param url: admin REST endpoint of this specific web adaptor.
        :param gis: ``GIS`` object or low-level ``_ArcGISConnection``.
        :param kwargs: ``initialize=True`` fetches properties eagerly.
        """
        super(WebAdaptor, self).__init__(url=url,
        gis=gis,
        **kwargs)
        # NOTE(review): popped after **kwargs was forwarded to super, so
        # the base constructor also sees ``initialize`` -- confirm this
        # is intentional (same pattern repeats across this module).
        initialize = kwargs.pop("initialize", False)
        if isinstance(gis, _ArcGISConnection):
            self._con = gis
        elif isinstance(gis, GIS):
            self._gis = gis
            self._con = gis._con
        else:
            raise ValueError(
            "connection must be of type GIS or _ArcGISConnection")
        if initialize:
            self._init(self._gis)
    #----------------------------------------------------------------------
    def unregister(self):
        """
        You can use this operation to unregister the ArcGIS Web Adaptor
        from your portal. Once a Web Adaptor has been unregistered, your
        portal will no longer trust the Web Adaptor and will not accept any
        credentials from it. This operation is typically used when you want
        to register a new Web Adaptor or when your old Web Adaptor needs to
        be updated.

        :return: server response dict (parsed JSON).
        """
        url = "%s/unregister" % self._url
        params = {"f" : "json"}
        return self._con.post(path=url, postdata=params)
########################################################################
class Directory(BasePortalAdmin):
    """
    A directory is a file system-based folder that contains a specific type
    of content for the portal. The physicalPath property of a directory
    locates the actual path of the folder on the file system. Beginning at
    10.2.1, Portal for ArcGIS supports local directories and network shares
    as valid locations.

    During the Portal for ArcGIS installation, the setup program asks you
    for the root portal directory (that will contain all the portal's sub
    directories). However, you can change each registered directory through
    this API.
    """
    # Wrapper state: owning GIS (if any), active connection, REST endpoint.
    _gis = None
    _con = None
    _url = None
    #----------------------------------------------------------------------
    def __init__(self, url, gis=None, **kwargs):
        """
        Constructor.

        :param url: admin REST endpoint of this directory resource.
        :param gis: ``GIS`` object or low-level ``_ArcGISConnection``.
        :param kwargs: ``initialize=True`` fetches properties eagerly.
        """
        super(Directory, self).__init__(url=url,
        gis=gis,
        **kwargs)
        # NOTE(review): popped after **kwargs was forwarded to super, so
        # the base constructor also sees ``initialize`` -- confirm.
        initialize = kwargs.pop("initialize", False)
        if isinstance(gis, _ArcGISConnection):
            self._con = gis
        elif isinstance(gis, GIS):
            self._gis = gis
            self._con = gis._con
        else:
            raise ValueError(
            "connection must be of type GIS or _ArcGISConnection")
        if initialize:
            self._init(self._gis)
    #----------------------------------------------------------------------
    @property
    def properties(self):
        """
        The properties operation on a directory can be used to change the
        physical path and description properties of the directory. This is
        useful when changing the location of a directory from a local path
        to a network share. However, the API does not copy your content and
        data from the old path to the new path. This has to be done
        independently by the system administrator.

        :return: ``PropertyMap`` built from the cached JSON of this
            resource (overrides the base-class property).
        """
        return PropertyMap(self._json_dict)
    #----------------------------------------------------------------------
    @properties.setter
    def properties(self, value):
        """
        The properties operation on a directory can be used to change the
        physical path and description properties of the directory. This is
        useful when changing the location of a directory from a local path
        to a network share. However, the API does not copy your content and
        data from the old path to the new path. This has to be done
        independently by the system administrator.

        :param value: ``PropertyMap`` or mapping of directory properties
            (e.g. physicalPath, description) merged into the edit request.
        """
        url = "%s/edit" % self._url
        params = {
        "f" : "json"
        }
        if isinstance(value, PropertyMap):
            value = dict(value)
        # Flatten the supplied properties into the POST payload.
        for k,v in value.items():
            params[k] = v
        return self._con.post(path=url, postdata=params)
########################################################################
class PortalLicense(BasePortalAdmin):
    """
    The Enterprise portal requires a valid license to function correctly.
    This resource returns information for user types that are licensed
    for your organization.

    Starting at 10.7, the Enterprise portal enforces user type licensing.
    Members are assigned a user type which determines the privileges that
    can be granted to the member through a role. Each user type may
    include access to specific apps and app bundles.

    The license information returned for the organization includes the
    total number of registered members that can be added, the current
    number of members in the organization and the Portal for ArcGIS
    version. For each user type, the license information includes the ID,
    the maximum number of registered members that can be assigned, the
    number of members currently assigned the license and the expiration,
    in epoch time. In addition, this resource provides access to the
    Validate License, Import License, Populate License, Update License
    Manager, and Release License operations.
    """
    # Declared for consistency with the sibling admin resource classes.
    _gis = None
    _con = None
    _url = None
    #----------------------------------------------------------------------
    def __init__(self, url, gis=None, **kwargs):
        """
        Constructor.

        :param url: admin REST endpoint of the portal license resource.
        :param gis: ``GIS`` object or low-level ``_ArcGISConnection``.
        :param kwargs: ``initialize=True`` fetches properties eagerly.
        """
        # BUGFIX: previously called ``super(License, self)`` which names
        # the wrong class and fails at runtime; bind to this class.
        super(PortalLicense, self).__init__(url=url,
                                            gis=gis,
                                            **kwargs)
        initialize = kwargs.pop("initialize", False)
        if isinstance(gis, _ArcGISConnection):
            self._con = gis
        elif isinstance(gis, GIS):
            self._gis = gis
            self._con = gis._con
        else:
            raise ValueError(
                "connection must be of type GIS or _ArcGISConnection")
        if initialize:
            self._init(self._gis)
    #----------------------------------------------------------------------
    def import_license(self, file):
        """
        The `import_license` operation is used to import a new license
        file. The portal license file contains your Enterprise portal's
        user type, app and app bundle licenses. By importing a portal
        license file, you will be applying the licenses in the file to your
        organization.

        Caution:
        Importing a new portal license file will overwrite your
        organization's current user type, app, and app bundle licenses.
        Before importing, verify that the new license file has
        sufficient user type, app, and app bundle licenses.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        file                        Required String. The portal license file.
        =========================== ====================================================================

        :returns: Boolean
        """
        file = {'file' : file}
        params = {'f' : "json"}
        url = "%s/importLicense" % self._url
        res = self._con.post(url, params, files=file)
        if 'success' in res:
            return res['success']
        return res
    #----------------------------------------------------------------------
    def populate(self):
        """
        The `populate` operation applies the license information from the
        license file that is used to create or upgrade your portal. This
        operation is only necessary as you create or upgrade your portal
        through the Portal Admin API.

        :returns: boolean
        """
        params = {'f' : "json"}
        url = "%s/populateLicense" % self._url
        res = self._con.post(url, params)
        if 'success' in res:
            return res['success']
        return res
    #----------------------------------------------------------------------
    def release_license(self, username):
        """
        If a user checks out an ArcGIS Pro license for offline or
        disconnected use, this operation releases the license for the
        specified account. A license can only be used with a single device
        running ArcGIS Pro. To check in the license, a valid access token
        and refresh token is required. If the refresh token for the device
        is lost, damaged, corrupted, or formatted, the user will not be
        able to check in the license. This prevents the user from logging
        in to ArcGIS Pro from any other device. As an administrator, you
        can release the license. This frees the outstanding license and
        allows the user to check out a new license or use ArcGIS Pro in a
        connected environment.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        username                    Required String. The user name of the account.
        =========================== ====================================================================

        :returns: Boolean
        """
        params = {
            'f' : "json",
            # BUGFIX: ``username`` was accepted but never sent, so the
            # server could not know which account's license to release
            # (matches Licenses.release_license and the docstring).
            'username' : username,
        }
        url = "%s/releaseLicense" % self._url
        res = self._con.post(url, params)
        if 'success' in res:
            return res['success']
        return res
    #----------------------------------------------------------------------
    def update(self, info):
        """
        ArcGIS License Server Administrator works with your portal and
        enforces licenses for ArcGIS Pro. This operation allows you to
        change the license server connection information for your portal.

        You can register a backup license manager for high availability of
        your licensing portal. After configuring the backup license
        manager, Portal for ArcGIS is restarted automatically. When the
        restart completes, the portal is configured with the backup license
        server you specified. When configuring a backup license manager,
        you will need to ensure that the backup is authorized using the
        same license file as your portal.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        info                        Required Dict. The JSON representation of the license server
                                    connection information.
        =========================== ====================================================================

        :returns: Boolean

        **Sample Usage**

        >>> gis.admin.system.licenses.update(info={ "hostname": "licensemanager.domain.com,backuplicensemanager.domain.com",
                                                    "port": 27000
                                                  })
        True
        """
        params = {
            'f' : "json",
            'licenseManagerInfo' : info
        }
        url = "%s/updateLicenseManager" % self._url
        res = self._con.post(url, params)
        if 'success' in res:
            return res['success']
        return res
    #----------------------------------------------------------------------
    def validate(self, file, list_ut=False):
        """
        The `validate` operation is used to validate an input license file.
        Only valid license files can be imported into the Enterprise
        portal. If the provided file is valid, the operation will return
        user type, app bundle, and app information from the license file.
        If the file is invalid, the operation will fail and return an error
        message.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        file                        Required String. The portal license file.
        --------------------------- --------------------------------------------------------------------
        list_ut                     Optional Boolean. Returns a list of user types that are compatible
                                    with the Administrator role. This identifies the user type(s) that
                                    can be assigned to the Initial Administrator Account when creating
                                    a portal.
        =========================== ====================================================================

        :returns: Dict
        """
        file = {'file' : file}
        params = {'f' : "json",
                  'listAdministratorUserTypes' : list_ut}
        url = "%s/validateLicense" % self._url
        res = self._con.post(url, params, files=file)
        return res
########################################################################
class Licenses(BasePortalAdmin):
    """
    Portal for ArcGIS requires a valid license to function correctly. This
    resource returns the current status of the license.

    As of 10.2.1, Portal for ArcGIS enforces the license by checking the
    number of registered members and comparing it with the maximum number
    of members authorized by the license. Contact Esri Customer Service if
    you have questions about license levels or expiration properties.

    Starting at 10.5, Portal for ArcGIS enforces two levels of membership
    for licensing to define sets of privileges for registered members and
    their assigned roles.

    **Deprecated at ArcGIS Enterprise 10.7**
    """
    _gis = None
    _con = None
    _url = None
    # Application identifiers accepted by the entitlement operations;
    # shared by ``entitlements`` and ``remove_entitlement`` (previously
    # duplicated in each method).
    _ALLOWED_APPS = ("appstudioweb", "arcgisprodesktop",
                     "busanalystonline_2", "drone2map",
                     "geoplanner", "arcgisInsights",
                     "LRReporter", "navigator",
                     "RoadwayReporter")
    #----------------------------------------------------------------------
    def __init__(self, url, gis=None, **kwargs):
        """
        Constructor.

        :param url: admin REST endpoint of the portal license resource.
        :param gis: ``GIS`` object or low-level ``_ArcGISConnection``.
        :param kwargs: ``initialize=True`` fetches properties eagerly.
        """
        super(Licenses, self).__init__(url=url,
                                       gis=gis,
                                       **kwargs)
        initialize = kwargs.pop("initialize", False)
        if isinstance(gis, _ArcGISConnection):
            self._con = gis
        elif isinstance(gis, GIS):
            self._gis = gis
            self._con = gis._con
        else:
            raise ValueError(
                "connection must be of type GIS or _ArcGISConnection")
        if initialize:
            self._init(self._gis)
    #----------------------------------------------------------------------
    def entitlements(self, app="arcgisprodesktop"):
        """
        This operation returns the currently queued entitlements for a
        product, such as ArcGIS Pro or Navigator for ArcGIS, and applies
        them when their start dates become effective. It's possible that
        all entitlements imported using the Import Entitlements operation
        are effective immediately and no entitlements are added to the
        queue. In this case, the operation returns an empty result.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        app                         Required string. The application lookup.
                                    Allowed values: appstudioweb,arcgisprodesktop,busanalystonline_2,
                                    drone2map,geoplanner,arcgisInsights,LRReporter,
                                    navigator, or RoadwayReporter
        =========================== ====================================================================

        :returns: dict
        """
        params = {
            "f" : "json",
            "appId" : app
        }
        if app not in self._ALLOWED_APPS:
            raise ValueError("The app value must be: %s" % ",".join(self._ALLOWED_APPS))
        url = "%s/getEntitlements" % self._url
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    def remove_entitlement(self, app="arcgisprodesktop"):
        """
        Deletes an entitlement from a site.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        app                         Required string. The application lookup.
                                    Allowed values: appstudioweb,arcgisprodesktop,busanalystonline_2,
                                    drone2map,geoplanner,arcgisInsights,LRReporter,
                                    navigator, or RoadwayReporter
        =========================== ====================================================================

        :returns: dict
        """
        params = {
            "f" : "json",
            "appId" : app
        }
        if app not in self._ALLOWED_APPS:
            raise ValueError("The app value must be: %s" % ",".join(self._ALLOWED_APPS))
        # NOTE(review): this hits ``removeAllEntitlements`` -- the same
        # endpoint as ``remove_all`` -- which looks like a copy/paste;
        # confirm against the Portal Admin REST API before changing.
        url = "%s/removeAllEntitlements" % self._url
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    def update_license_manager(self, info):
        """
        ArcGIS License Server Administrator works with your portal and
        enforces licenses for ArcGIS Pro. This operation allows you to
        change the license server connection information for your portal.

        When you import entitlements into portal using the Import
        Entitlements operation, a license server is automatically
        configured for you. If your license server changes after the
        entitlements have been imported, you only need to change the
        license server connection information.

        You can register a backup license manager for high availability of
        your licensing portal. When configuring a backup license manager,
        you need to make sure that the backup license manager has been
        authorized with the same organizational entitlements. After
        configuring the backup license manager, Portal for ArcGIS is
        restarted automatically. When the restart completes, the portal is
        configured with the backup license server you specified.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        info                        Required string. The JSON representation of the license server
                                    connection information.
        =========================== ====================================================================

        :returns: dict
        """
        params = {
            "f" : "json",
            "licenseManagerInfo" : info
        }
        url = "%s/updateLicenseManager" % self._url
        return self._con.post(path=url, postdata=params)
    #----------------------------------------------------------------------
    def import_entitlements(self, file, application):
        """
        This operation allows you to import entitlements for ArcGIS Pro and
        additional products such as Navigator for ArcGIS into your
        licensing portal. Once the entitlements have been imported, you can
        assign licenses to users within your portal. The operation requires
        an entitlements file that has been exported out of your ArcGIS
        License Server Administrator or out of My Esri, depending on the
        product.

        A typical entitlements file will have multiple parts, each
        representing a set of entitlements that are effective at a specific
        date. The parts that are effective immediately will be configured
        to be the current entitlements. Other parts will be added to a
        queue. The portal framework will automatically apply the parts when
        they become effective. You can use the Get Entitlements operation
        to see the parts that are in the queue.

        Each time this operation is invoked, it overwrites all existing
        entitlements, even the ones that are in the queue.

        =========================== ====================================================================
        **Argument**                **Description**
        --------------------------- --------------------------------------------------------------------
        file                        Required string. The entitlement file to load into Enterprise.
        --------------------------- --------------------------------------------------------------------
        application                 Required string. The application identifier to be imported
        =========================== ====================================================================

        :returns: dict
        """
        url = "%s/importEntitlements" % self._url
        params = {
            "f" : "json",
            "appId" : application
        }
        files = {'file' : file}
        return self._con.post(path=url,
                              postdata=params,
                              files=files)
    #----------------------------------------------------------------------
    def remove_all(self, application):
        """
        This operation removes all entitlements from the portal for ArcGIS
        Pro or additional products such as Navigator for ArcGIS and revokes
        all entitlements assigned to users for the specified product. The
        portal is no longer a licensing portal for that product.

        License assignments are retained on disk. Therefore, if you decide
        to configure this portal as a licensing portal for the product
        again in the future, all licensing assignments will be available in
        the website.
        """
        params = {"f" : "json",
                  "appId" : application}
        url = "%s/removeAllEntitlements" % self._url
        return self._con.get(path=url, params=params)
    #----------------------------------------------------------------------
    def release_license(self, username):
        """
        If a user checks out an ArcGIS Pro license for offline or
        disconnected use, this operation releases the license for the
        specified account. A license can only be used with a single device
        running ArcGIS Pro. To check in the license, a valid access token
        and refresh token is required. If the refresh token for the device
        is lost, damaged, corrupted, or formatted, the user will not be
        able to check in the license. This prevents the user from logging
        in to ArcGIS Pro from any other device. As an administrator, you
        can release the license. This frees the outstanding license and
        allows the user to check out a new license or use ArcGIS Pro in a
        connected environment.
        """
        params = {"f" : "json",
                  "username" : username
                  }
        url = "%s/releaseLicense" % self._url
        return self._con.get(path=url, params=params)
| [
"sherbaz.hashmi@gmail.com"
] | sherbaz.hashmi@gmail.com |
38b9929591eb96f24bcdef78f5439e7dda136927 | 0baeeda7b48da3a199cf471e731b9679c95cebcc | /sc/templer/core/generic_setup.py | 227cb6cfbd2f890b0d44703164d3f4f413988ef0 | [] | no_license | simplesconsultoria/sc.templer.core | 15ee9340fbaa4615ac405d8cec2ce6985fa91113 | 8f1e8a7cf5b1bcd77bf963cd406b5ac152c7cfa4 | refs/heads/master | 2021-01-20T10:41:40.380015 | 2012-04-24T23:50:49 | 2012-04-24T23:50:49 | 2,964,807 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | # -*- coding:utf-8 -*-
from templer.core.vars import EXPERT
from templer.core.vars import BooleanVar
# Questions controlling which GenericSetup profiles the generated
# package registers; both are asked only in EXPERT mode and default
# to registering the profile.
gs_vars = [
    BooleanVar(
        'add_profile',
        title='Register a Default Profile',
        description='Should this package register a Default GS Profile',
        default=True,
        modes=(EXPERT,),
        structures={'True': 'gs_nested_default', 'False': None},
    ),
    BooleanVar(
        'add_profile_uninstall',
        title='Register an Uninstall Profile',
        description='Should this package register an Uninstall GS Profile',
        default=True,
        modes=(EXPERT,),
        structures={'True': 'gs_nested_uninstall', 'False': None},
    ),
]
"erico@simplesconsultoria.com.br"
] | erico@simplesconsultoria.com.br |
634fd510f27f199252bf1c265194f3f4af44359e | 5a0dfe1326bb166d6dfaf72ce0f89ab06e963e2c | /leetcode/lc361.py | d32d70cb24c130e5f8349d2bcb7669a777c4c691 | [] | no_license | JasonXJ/algorithms | 7bf6a03c3e26f917a9f91c53fc7b2c65669f7692 | 488d93280d45ea686d30b0928e96aa5ed5498e6b | refs/heads/master | 2020-12-25T15:17:44.345596 | 2018-08-18T07:20:27 | 2018-08-18T07:20:27 | 67,798,458 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,007 | py | class Solution(object):
def maxKilledEnemies(self, grid):
    """
    Return the maximum number of enemies ('E') a bomb placed on an
    empty cell ('0') can kill; walls ('W') block the blast in all
    four directions.

    :type grid: List[List[str]]
    :rtype: int
    """
    if not grid or not grid[0]:
        return 0
    m, n = len(grid), len(grid[0])
    best = 0
    row_hits = 0           # enemies in the current wall-free row segment
    col_hits = [0] * n     # enemies in the current wall-free column segment
    for r in range(m):
        for c in range(n):
            # Recompute the horizontal count at the left edge or just
            # after a wall; it stays valid until the next wall.
            if c == 0 or grid[r][c - 1] == 'W':
                row_hits = 0
                k = c
                while k < n and grid[r][k] != 'W':
                    if grid[r][k] == 'E':
                        row_hits += 1
                    k += 1
            # Same idea vertically for this column.
            if r == 0 or grid[r - 1][c] == 'W':
                col_hits[c] = 0
                k = r
                while k < m and grid[k][c] != 'W':
                    if grid[k][c] == 'E':
                        col_hits[c] += 1
                    k += 1
            if grid[r][c] == '0':
                best = max(best, row_hits + col_hits[c])
    return best
def test():
    """Ad-hoc smoke tests for Solution.maxKilledEnemies."""

    def check(grid_text, expected):
        # Turn an indented multi-line string into a character matrix,
        # dropping surrounding whitespace on each row.
        rows = [
            list(line.strip())
            for line in grid_text.strip().splitlines()
        ]
        assert Solution().maxKilledEnemies(rows) == expected

    check(
        """
        0E00
        E0WE
        0E00
        """, 3
    )
    check('', 0)
    check(
        """
        0EE0
        E0WE
        0E00
        """, 3
    )
    check(
        """
        E0EE
        E0WE
        0E00
        """, 4
    )
| [
"lxj2048@gmail.com"
] | lxj2048@gmail.com |
0d93a4c9daefd91f328a835a36e56e41ff4c3d2c | 4252102a1946b2ba06d3fa914891ec7f73570287 | /pylearn2/sandbox/cuda_convnet/tests/test_weight_acts.py | 8158dd02343ec2f4e08001c2116ffb5405fd8327 | [] | no_license | lpigou/chalearn2014 | 21d487f314c4836dd1631943e20f7ab908226771 | 73b99cdbdb609fecff3cf85e500c1f1bfd589930 | refs/heads/master | 2020-05-17T00:08:11.764642 | 2014-09-24T14:42:00 | 2014-09-24T14:42:00 | 24,418,815 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 4,673 | py | __authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "Ian Goodfellow"
__email__ = "goodfeli@iro"
from pylearn2.testing.skip import skip_if_no_gpu
skip_if_no_gpu()
import numpy as np
from theano import shared
from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from pylearn2.sandbox.cuda_convnet.weight_acts import WeightActs
from theano.sandbox.cuda import gpu_from_host
from theano.sandbox.cuda import host_from_gpu
from theano.sandbox.rng_mrg import MRG_RandomStreams
import theano.tensor as T
from theano.tensor.nnet.conv import conv2d
from theano.tensor import as_tensor_variable
from theano import function
import warnings
def test_match_grad_valid_conv():

    # Tests that weightActs is the gradient of FilterActs
    # with respect to the weights.
    # Strategy: build the same valid convolution twice -- once with the
    # cuda-convnet FilterActs op and once with theano's conv2d (with the
    # dimshuffles/flips needed to match layouts) -- then compare both the
    # forward outputs and the weight gradients (WeightActs vs T.grad).

    for partial_sum in [0, 1, 4]:
        # Fixed seed so the comparison is deterministic across runs.
        rng = np.random.RandomState([2012,10,9])

        batch_size = 3
        rows = 7
        cols = 9
        channels = 8
        filter_rows = 4
        filter_cols = filter_rows
        num_filters = 16

        # cuda-convnet layout: (channels, rows, cols, batch).
        images = shared(rng.uniform(-1., 1., (channels, rows, cols,
            batch_size)).astype('float32'), name='images')
        filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
            filter_cols, num_filters)).astype('float32'), name='filters')

        gpu_images = gpu_from_host(images)
        gpu_filters = gpu_from_host(filters)

        output = FilterActs(partial_sum=partial_sum)(gpu_images, gpu_filters)
        output = host_from_gpu(output)

        # Reference path: convert to theano's bc01 layout and flip the
        # kernels, since conv2d performs true convolution (cross-correlation
        # with flipped filters) relative to FilterActs.
        images_bc01 = images.dimshuffle(3,0,1,2)
        filters_bc01 = filters.dimshuffle(3,0,1,2)
        filters_bc01 = filters_bc01[:,:,::-1,::-1]

        output_conv2d = conv2d(images_bc01, filters_bc01,
                border_mode='valid')
        output_conv2d = output_conv2d.dimshuffle(1,2,3,0)

        theano_rng = MRG_RandomStreams(2013 + 1 + 31)

        # Random linear functional of the output; its gradient w.r.t. the
        # weights exercises WeightActs along a random direction.
        coeffs = theano_rng.normal(avg=0., std=1., size=output_conv2d.shape, dtype='float32')

        cost_conv2d = (coeffs * output_conv2d).sum()

        weights_grad_conv2d = T.grad(cost_conv2d, filters)

        cost = (coeffs * output).sum()
        hid_acts_grad = T.grad(cost, output)

        weights_grad = WeightActs(partial_sum=partial_sum)(
            gpu_images,
            gpu_from_host(hid_acts_grad),
            as_tensor_variable((4, 4))
        )[0]
        weights_grad = host_from_gpu(weights_grad)

        f = function([], [output, output_conv2d, weights_grad,
                          weights_grad_conv2d])

        output, output_conv2d, weights_grad, weights_grad_conv2d = f()

        # Forward outputs must agree within a (loose) float32 tolerance;
        # on mismatch, dump diagnostics before failing.
        if np.abs(output - output_conv2d).max() > 8e-6:

            assert type(output) == type(output_conv2d)
            assert output.dtype == output_conv2d.dtype
            if output.shape != output_conv2d.shape:
                print 'cuda-convnet shape: ',output.shape
                print 'theano shape: ',output_conv2d.shape
                assert False
            err = np.abs(output - output_conv2d)
            print 'absolute error range: ', (err.min(), err.max())
            print 'mean absolute error: ', err.mean()
            print 'cuda-convnet value range: ', (output.min(), output.max())
            print 'theano value range: ', (output_conv2d.min(), output_conv2d.max())
            assert False

        warnings.warn("""test_match_grad_valid_conv success criterion is not very strict. Can we verify that this is OK?
One possibility is that theano is numerically unstable and Alex's code is better.
Probably theano CPU 64 bit is OK but it's worth checking the others.""")

        # Same comparison for the weight gradients.
        if np.abs(weights_grad - weights_grad_conv2d).max() > 8.6e-6:
            if type(weights_grad) != type(weights_grad_conv2d):
                raise AssertionError("weights_grad is of type " + str(weights_grad))
            assert weights_grad.dtype == weights_grad_conv2d.dtype
            if weights_grad.shape != weights_grad_conv2d.shape:
                print 'cuda-convnet shape: ',weights_grad.shape
                print 'theano shape: ',weights_grad_conv2d.shape
                assert False
            err = np.abs(weights_grad - weights_grad_conv2d)
            print 'absolute error range: ', (err.min(), err.max())
            print 'mean absolute error: ', err.mean()
            print 'cuda-convnet value range: ', (weights_grad.min(), weights_grad.max())
            print 'theano value range: ', (weights_grad_conv2d.min(), weights_grad_conv2d.max())
            assert False
# Allow running this GPU test directly as a script (outside a test runner).
if __name__ == '__main__':
    test_match_grad_valid_conv()
| [
"lionelpigou@gmail.com"
] | lionelpigou@gmail.com |
1aaebbd2b104512b2853e34339d5975a5909a7e2 | 2455f25b08b2d2bcdccd666a29975b032e74cea8 | /oauth_login/login/__init__.py | 1c313ecf741a8525f0d69119189313d14407c4bb | [
"Apache-2.0"
] | permissive | vinoth3v/In_addon_oauth_login | 8b3537b00398cf7b8cfe98b649187293ba5c6fd1 | b7ebfaa8d3a3c455d58300ac7c23da761273aadf | refs/heads/master | 2021-01-10T02:22:40.074592 | 2015-12-16T03:15:04 | 2015-12-16T03:15:04 | 48,084,350 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | from .oauth_login import *
from .google import *
from .facebook import * | [
"vinoth.3v@gmail.com"
] | vinoth.3v@gmail.com |
8e63a7fcf360bfed8263a4df56a298eee731d690 | 33c4bc9ca463ce0ec61945fca5841c9d8a18ab8e | /thrift/lib/py3/test/exceptions.py | 43419dbc5e515f17dcb9c999576bb9179c86b613 | [
"Apache-2.0"
] | permissive | gaurav1086/fbthrift | d54bb343bf1a8503dd329fbfcd0b46fe9f70754c | 68d1a8790bfd5b3974e1b966c8071f9c456b6c6a | refs/heads/master | 2020-12-27T22:41:09.452839 | 2020-02-03T23:56:20 | 2020-02-03T23:58:33 | 238,088,855 | 0 | 0 | Apache-2.0 | 2020-02-04T00:13:04 | 2020-02-04T00:13:03 | null | UTF-8 | Python | false | false | 3,529 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from testing.types import UnusedError, HardError, SimpleError, Color, UnfriendlyError
from .exception_helper import simulate_UnusedError, simulate_HardError
from thrift.py3 import Error
class ExceptionTests(unittest.TestCase):
def test_hashability(self) -> None:
hash(UnusedError())
def test_creation_optional_from_c(self) -> None:
msg = "this is what happened"
x = simulate_UnusedError(msg)
self.assertIsInstance(x, UnusedError)
self.assertIn(msg, str(x))
self.assertIn(msg, x.args)
self.assertEqual(msg, x.message)
self.assertEqual(UnusedError(*x.args), x) # type: ignore
def test_exception_message_annotation(self) -> None:
x = UnusedError(message="something broke")
self.assertEqual(x.message, str(x))
y = HardError("WAT!", 22) # type: ignore
self.assertEqual(y.errortext, str(y))
z = UnfriendlyError("WAT!", 22) # type: ignore
self.assertNotEqual(z.errortext, str(z))
self.assertNotEqual(str(y), str(z))
def test_creation_optional_from_python(self) -> None:
msg = "something broke"
UnusedError()
x = UnusedError(msg) # type: ignore
y = UnusedError(message=msg)
self.assertEqual(x, y)
self.assertEqual(x.args, y.args)
self.assertEqual(x.message, y.message)
self.assertEqual(str(x), str(x))
def test_creation_required_from_c(self) -> None:
msg = "ack!"
code = 22
x = simulate_HardError(msg, code)
self.assertIsInstance(x, HardError)
self.assertIn(msg, str(x))
self.assertIn(msg, x.args)
self.assertIn(code, x.args)
self.assertEqual(code, x.code)
self.assertEqual(msg, x.errortext)
self.assertEqual(x, HardError(*x.args)) # type: ignore
def test_creation_required_from_python(self) -> None:
msg = "ack!"
code = 22
with self.assertRaises(TypeError):
HardError(msg) # type: ignore
x = HardError(msg, code) # type: ignore
y = HardError(msg, code=code) # type: ignore
self.assertEqual(x, y)
self.assertEqual(x.args, y.args)
self.assertEqual(x.errortext, y.errortext)
self.assertEqual(x.code, y.code)
self.assertEqual(str(x), str(y))
def test_raise(self) -> None:
with self.assertRaises(SimpleError):
raise SimpleError()
with self.assertRaises(Error):
raise SimpleError(Color.red)
with self.assertRaises(Exception):
raise SimpleError()
with self.assertRaises(BaseException):
raise SimpleError()
x = SimpleError(Color.blue)
self.assertIsInstance(x, BaseException)
self.assertIsInstance(x, Exception)
self.assertIsInstance(x, Error)
self.assertIsInstance(x, SimpleError)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
4d5fa5eafc532235892784af3064d77145c79f15 | 85b402cd9e762b2749c978105ea362b10d335e5c | /114_auto_image_quality_assessment_BRISQUE.py | 8f4ffa79a306138610b23420218c52a56ad6434d | [] | no_license | bnsreenu/python_for_microscopists | 29c08f17461baca95b5161fd4cd905be515605c4 | 4b8c0bd4274bc4d5e906a4952988c7f3e8db74c5 | refs/heads/master | 2023-09-04T21:11:25.524753 | 2023-08-24T18:40:53 | 2023-08-24T18:40:53 | 191,218,511 | 3,010 | 2,206 | null | 2023-07-25T07:15:22 | 2019-06-10T17:53:14 | Jupyter Notebook | UTF-8 | Python | false | false | 3,095 | py | #!/usr/bin/env python
__author__ = "Sreenivas Bhattiprolu"
__license__ = "Feel free to copy, I appreciate if you acknowledge Python for Microscopists"
# https://youtu.be/Vj-YcXswTek
"""
BRISQUE calculates the no-reference image quality score for an image using the
Blind/Referenceless Image Spatial Quality Evaluator (BRISQUE).
BRISQUE score is computed using a support vector regression (SVR) model trained on an
image database with corresponding differential mean opinion score (DMOS) values.
The database contains images with known distortion such as compression artifacts,
blurring, and noise, and it contains pristine versions of the distorted images.
The image to be scored must have at least one of the distortions for which the model was trained.
Mittal, A., A. K. Moorthy, and A. C. Bovik. "No-Reference Image Quality Assessment in the Spatial Domain.
" IEEE Transactions on Image Processing. Vol. 21, Number 12, December 2012, pp. 4695–4708.
https://live.ece.utexas.edu/publications/2012/TIP%20BRISQUE.pdf
To install imquality
https://pypi.org/project/image-quality/
"""
import numpy as np
from skimage import io, img_as_float
import imquality.brisque as brisque
#img = img_as_float(io.imread('noisy_images/BSE.jpg', as_gray=True))
img = img_as_float(io.imread('noisy_images/BSE_50sigma_noisy.jpg', as_gray=True))
score = brisque.score(img)
print("Brisque score = ", score)
#Now let us check BRISQUE scores for a bunch of blurred images.
img0 = img_as_float(io.imread('noisy_images/BSE.jpg', as_gray=True))
img25 = img_as_float(io.imread('noisy_images/BSE_1sigma_blur.jpg', as_gray=True))
img50 = img_as_float(io.imread('noisy_images/BSE_2sigma_blur.jpg', as_gray=True))
img75 = img_as_float(io.imread('noisy_images/BSE_3sigma_blur.jpg', as_gray=True))
img100 = img_as_float(io.imread('noisy_images/BSE_5sigma_blur.jpg', as_gray=True))
img200 = img_as_float(io.imread('noisy_images/BSE_10sigma_blur.jpg', as_gray=True))
score0 = brisque.score(img0)
score25 = brisque.score(img25)
score50 = brisque.score(img50)
score75 = brisque.score(img75)
score100 = brisque.score(img100)
score200 = brisque.score(img200)
print("BRISQUE Score for 0 blur = ", score0)
print("BRISQUE Score for 1 sigma blur = ", score25)
print("BRISQUE Score for 2 sigma blur = ", score50)
print("BRISQUE Score for 3 sigma blur = ", score75)
print("BRISQUE Score for 5 sigma blur = ", score100)
print("BRISQUE Score for 10 sigma blur = ", score200)
# Peak signal to noise ratio (PSNR) is Not a good metric.
from skimage.metrics import peak_signal_noise_ratio
psnr_25 = peak_signal_noise_ratio(img0, img25)
psnr_50 = peak_signal_noise_ratio(img0, img50)
psnr_75 = peak_signal_noise_ratio(img0, img75)
psnr_100 = peak_signal_noise_ratio(img0, img100)
psnr_200 = peak_signal_noise_ratio(img0, img200)
print("PSNR for 1 sigma blur = ", psnr_25)
print("PSNR for 2 sigma blur = ", psnr_50)
print("PSNR for 3 sigma blur = ", psnr_75)
print("PSNR for 5 sigma blur = ", psnr_100)
print("PSNR for 10 sigma blur = ", psnr_200)
| [
"noreply@github.com"
] | bnsreenu.noreply@github.com |
5e9c0b270abbdb2d315cecc707efc0b71198eb0d | e457376950380dd6e09e58fa7bee3d09e2a0f333 | /plugin/src/test/resources/refactoring/move/importSlash/after/src/file1.py | c069260ef5aade13f421dd49bfe732ac66fbd2af | [
"Apache-2.0"
] | permissive | consulo/consulo-python | b816b7b9a4b346bee5d431ef6c39fdffe40adf40 | e191cd28f043c1211eb98af42d3c0a40454b2d98 | refs/heads/master | 2023-08-09T02:27:03.585942 | 2023-07-09T08:33:47 | 2023-07-09T08:33:47 | 12,317,018 | 0 | 0 | Apache-2.0 | 2020-06-05T17:16:50 | 2013-08-23T07:16:43 | Java | UTF-8 | Python | false | false | 55 | py | __author__ = 'catherine'
def function_1():
pass
| [
"vistall.valeriy@gmail.com"
] | vistall.valeriy@gmail.com |
57325adb88ec75ed2ebb7aad5993a18a0458adc8 | 08191bb5dd52ab63aa0210d81c4d7dfd856365f7 | /osa05-18_vanhin_henkiloista/src/vanhin_henkiloista.py | 5926727dbbae0d2b290b29ec83fc82466c28a81a | [] | no_license | JoukoRintamaki/mooc-ohjelmointi-21 | 446d77477a8752182cb4568cce184795371cd506 | ebc80495e4c6654d6beab47593d2c1b06bf12d08 | refs/heads/main | 2023-03-11T14:53:33.949801 | 2021-02-28T14:57:24 | 2021-02-28T14:57:24 | 336,796,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | def vanhin(henkilot: list):
vanhin=""
vanhinIka=2999
for henkilo in henkilot:
if henkilo[1] < vanhinIka:
vanhin=henkilo[0]
vanhinIka=henkilo[1]
return vanhin
if __name__ == "__main__":
h1 = ("Arto", 1977)
h2 = ("Einari", 1985)
h3 = ("Maija", 1953)
h4 = ("Essi", 1997)
hlista = [h1, h2, h3, h4]
print(vanhin(hlista))
| [
"jouko.rintamaki@live.fi"
] | jouko.rintamaki@live.fi |
fa73c07e3907644afa9f63ac97e01e28926855d5 | bfb113c3076f5b0570953583e7a2321c774d73ea | /request_modules/request_module_example5.py | 106dde71122751e8cc4a2c53f2b4162e6af91ce3 | [] | no_license | gsudarshan1990/Training_Projects | 82c48d5492cb4be94db09ee5c66142c370794e1c | 2b7edfafc4e448bd558c034044570496ca68bf2d | refs/heads/master | 2022-12-10T15:56:17.535096 | 2020-09-04T06:02:31 | 2020-09-04T06:02:31 | 279,103,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | """
This is the example of the request module which provides the data in the json format
"""
import requests
response_object = requests.get("https://api.datamuse.com/words?rel_rhy=funny")#This is the query parameter
dict_object = response_object.json()#This obtaining the dictionary
print(dict_object) | [
"sudarshan2009@live.in"
] | sudarshan2009@live.in |
d1ea690ce6fc3362e7be323e77a96e59945debfb | a77a11cccf133168f9cf2178f4a3b572f0ff7338 | /test_data/test_data.py | 6fe0ccd83c3310d52308d4e09e773dcff6027ab2 | [] | no_license | redsnowc/jobplus11-2 | f21ab84c3f7bc2576ef275099355ab88cf763bea | 33fa0e6d0cbd9987de29c0027b676f4ba847115d | refs/heads/master | 2020-05-17T00:11:56.445400 | 2019-05-13T08:45:41 | 2019-05-13T08:45:41 | 183,390,450 | 0 | 0 | null | 2019-04-25T08:27:31 | 2019-04-25T08:27:31 | null | UTF-8 | Python | false | false | 2,371 | py | # -*- coding: UTF-8 -*-
from jobplus.models import db, User, CompanyInfo, UserInfo, Job
from random import randint, choice
from faker import Faker
fe = Faker()
fc = Faker('zh-cn')
def create_user():
for i in range(4):
user = User()
user_info = UserInfo()
user.username = fe.name()
user.email = fe.email()
user.password = '123456'
user_info.name = fc.name()
user_info.phone_number = fc.phone_number()
user_info.experience = randint(0, 10)
user_info.resume = fe.url()
user_info.user = user
db.session.add(user)
db.session.add(user_info)
db.session.commit()
def create_company():
for i in range(16):
user = User()
company_info = CompanyInfo()
user.username = fc.company()
user.email = fe.email()
user.password = '123456'
user.role = 20
company_info.address = fc.city()
company_info.domain = '互联网 AI 大数据'
company_info.intro = ''.join(fc.words(8))
company_info.detail = fc.text()
company_info.logo = 'https://s2.ax1x.com/2019/04/28/EMmHjH.png'
company_info.website = fc.url()
company_info.company = user
db.session.add(user)
db.session.add(company_info)
db.session.commit()
def create_job():
for user in User.query.filter_by(role=20).all():
for i in range(4):
job = Job()
job.title = fc.job()
job.salary_lower = randint(2, 20)
job.salary_upper = job.salary_lower + randint(2, 10)
job.experience_lower = randint(0, 4)
if job.experience_lower == 0:
job.experience_upper = 0
else:
job.experience_upper = job.experience_lower + randint(1,4)
job.education = choice(['大专', '本科', '硕士', '博士', '经验不限'])
job.tags = 'Python Flask Mysql'
job.intro = fc.text()
job.company = user
db.session.add(job)
db.session.commit()
def create_admin():
user = User()
user.username = 'admin'
user.email = 'admin@admin.com'
user.password = '123456'
user.role = 30
db.session.add(user)
db.session.commit()
def run():
create_user()
create_company()
create_job()
create_admin()
| [
"nuanyang.44@gmail.com"
] | nuanyang.44@gmail.com |
e6dd618a067b490a15f501db49b3726a1a3a06ca | e08e192020fa951b57f16aea0ee1376a74115c53 | /irim/external/invertible_rim/examples/irim_denoising.py | 255071fb600c30d84e20b56bf6091e6c853a4764 | [
"MIT"
] | permissive | fcaliva/FNAF-fastMRI | 3d5912a20e6eef063ae534139257ba7dc08c0ed6 | 9e2aea9f8b0d794c617ca822980c6d85ab6557fc | refs/heads/main | 2023-03-21T00:20:42.142264 | 2021-03-03T23:32:25 | 2021-03-03T23:32:25 | 354,087,548 | 1 | 0 | MIT | 2021-04-02T17:27:20 | 2021-04-02T17:27:20 | null | UTF-8 | Python | false | false | 4,571 | py | """
A simple demonstration for training an IRIM as an image denoiser.
This script will lead through the process of defining a gradient function for the IRIM, then
building the RIM model, and how to train the model using invert to learn. This script will utilize
CUDA devices if available.
"""
import torch
from irim import IRIM
from irim import InvertibleUnet
from irim import MemoryFreeInvertibleModule
# Use CUDA if available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# ---- Parameters ---
# Working with images, for time series or volumes set to 1 or 3, respectively
conv_nd = 2
# Number of Householder projections for constructing 1x1 convolutions
n_householder = 3
# Number of channels for each layer of the Invertible Unet
n_channels = [16,8,4,8,16]
# Number of hidden channel in the residual functions of the Invertible Unet
n_hidden = [16] * 5
# Downsampling factors
dilations = [1,2,4,2,1]
# Number of IRIM steps
n_steps = 5
# Number of image channels
im_channels = 3
# Number of total samples
n_samples = 64
im_size = 32
learning_rate = 1e-3
def grad_fun(x_est,y):
"""
Defines the gradient function for a denoising problem with White Noise.
This function demonstrates the use of Pytorch's autograd to calculate the gradient.
In this example, the function is equivalent to
def grad_fun(x_est,y):
return x_est - y
:param x_est: Tensor, model estimate of x
:param y: Tensor, noisy measurements
:return: grad_x
"""
# True during training, False during testing
does_require_grad = x_est.requires_grad
with torch.enable_grad():
# Necessary for using autograd
x_est.requires_grad_(True)
# Assuming uniform white noise, in the denoising case matrix A is the identity
error = torch.sum((y - x_est)**2)
# We retain the graph during training only
grad_x = torch.autograd.grad(error, inputs=x_est, retain_graph=does_require_grad,
create_graph=does_require_grad)[0]
# Set requires_grad back to it's original state
x_est.requires_grad_(does_require_grad)
return grad_x
# def grad_fun(x_est, y):
# return x_est - y
# At every iteration of the IRIM we use an Invertible Unet for processing. Note, that the use of ModuleList
# is necessary for Pytorch to properly register all modules.
step_models = torch.nn.ModuleList([InvertibleUnet(n_channels=n_channels,n_hidden=n_hidden,dilations=dilations,
conv_nd=conv_nd, n_householder=n_householder) for i in range(n_steps)])
# Build IRIM
model = IRIM(step_models,grad_fun,im_channels)
# Wrap the model to be trained with invert to learn
model = MemoryFreeInvertibleModule(model)
model.to(device)
# Use DataParallel if multiple devices are available
if torch.cuda.device_count() > 1:
model = torch.nn.DataParallel(model)
optimizer = torch.optim.Adam(model.parameters(), learning_rate)
# We generate a simple toy data set where the ground truth data has the same values in the image
# dimensions but different values across batch and channel dimensions. This demonstrates that the
# IRIM can deal with the implicit structure in the data, with a high range of values, and it can even
# do extrapolation.
x = torch.ones(n_samples,im_channels,*[im_size]*conv_nd, requires_grad=False, device=device)
x = torch.cumsum(x,0)
x = torch.cumsum(x,1)
y = x + torch.randn_like(x)
# Training and test split. This will result un an extrapolation problem on the test set.
y, y_test = torch.chunk(y,2,0)
x, x_test = torch.chunk(x,2,0)
# Initial states of the IRIM
x_in = torch.cat((y,torch.zeros(y.size(0),n_channels[0]-im_channels,*[im_size]*conv_nd, device=device)),1)
x_test_in = torch.cat((y_test,torch.zeros(y_test.size(0),n_channels[0]-im_channels,*[im_size]*conv_nd, device=device)),1)
x_in.requires_grad_(True)
x_test_in.requires_grad_(False)
for i in range(3000):
optimizer.zero_grad()
model.zero_grad()
# We only regress on the image dimensions
x_est = model.forward(x_in, y)[:,:im_channels]
loss = torch.nn.functional.mse_loss(x_est, x)
loss.backward()
optimizer.step()
if i % 100 == 0:
model.eval()
with torch.no_grad():
x_est = model.forward(x_test_in, y_test)[:, :im_channels]
loss = torch.nn.functional.mse_loss(x_est, x_test)
loss_noisy = torch.nn.functional.mse_loss(y_test, x_test)
print('Iteration', i, ': test loss =',loss.item(), ' loss noisy image =',loss_noisy.item())
model.train()
| [
"francesco.caliva@ucsf.edu"
] | francesco.caliva@ucsf.edu |
2acd41e081d0d6ae8fca6257a31843d2bf777385 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_squires.py | 6fb9d7cec5aa9aa6373a452e937b439e2c13cec3 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py |
from xai.brain.wordbase.verbs._squire import _SQUIRE
#calss header
class _SQUIRES(_SQUIRE, ):
def __init__(self,):
_SQUIRE.__init__(self)
self.name = "SQUIRES"
self.specie = 'verbs'
self.basic = "squire"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
4b8ae280cf7bb7071353068bd315c4f5f8c0896d | c99479824a703df2e22364162533661c7f39df04 | /utils/bp/default/default_interface_chrono_local_date_time.py | 71b323a63ab9b6a1644d627b7ee4b851106c40e1 | [] | no_license | tylerrbowen/sample | d1ccefd7ed823c758d3bc43729d76fd72e0042ff | 0fcfecd532a9679bf3f927d5fcb03edb005c5dc4 | refs/heads/master | 2021-01-01T06:34:05.647543 | 2013-12-07T00:13:02 | 2013-12-07T00:13:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,211 | py |
from default_interface_temporal import DefaultInterfaceTemporal
from abc import ABCMeta
from utils.bp.duration import ChronoUnit, ChronoUnitItem
from utils.bp.temporal.chrono_field import ChronoFieldItem, ChronoField
from utils.bp.chrono.chrono_local_date_time import ChronoLocalDateTime
from utils.bp.chrono.chronology import Chronology
from utils.bp.temporal.temporal_queries import TemporalQueries
from utils.bp.local_date import LocalDate
from utils.bp.instant import Instant
class DefaultInterfaceChronoLocalDateTime(DefaultInterfaceTemporal, ChronoLocalDateTime):
__metaclass__ = ABCMeta
def with_adjuster(self, adjuster):
return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
super(DefaultInterfaceChronoLocalDateTime, self).with_adjuster(adjuster))
def plus_temporal(self, temporal_amount):
return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
super(DefaultInterfaceChronoLocalDateTime, self).plus_temporal(temporal_amount))
def minus_temporal(self, temporal_amount):
return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
super(DefaultInterfaceChronoLocalDateTime, self).minus_temporal(temporal_amount))
def minus(self, amount=None, unit=None):
return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
super(DefaultInterfaceChronoLocalDateTime, self).minus(amount, unit))
def adjust_into(self, temporal):
return temporal.with_field(ChronoField.EPOCH_DAY, self.to_local_date().to_epoch_day()).\
with_field(ChronoField.NANO_OF_SECOND, self.to_local_time().to_nano_of_day())
def query(self, query):
if query == TemporalQueries.chronology():
return self.to_local_date().get_chronology()
elif query == TemporalQueries.precision():
return ChronoUnit.NANOS
elif query == TemporalQueries.local_date():
return LocalDate.of_epoch_day(self.to_local_date())
elif query == TemporalQueries.local_time():
return self.to_local_time()
return super(DefaultInterfaceChronoLocalDateTime, self).query(query)
def to_instant(self, offset):
return Instant.of_epoch_second(self.to_epoch_second(offset), self.to_local_time().get_nano())
def to_epoch_second(self, offset):
epoch_day = self.to_local_date().to_epoch_day()
secs = epoch_day * 86400 + self.to_local_time().to_second_of_day()
secs -= offset.get_total_seconds()
return secs
def __cmp__(self, other):
comp = self.to_local_date().__cmp__(other.to_local_date())
if comp == 0:
comp = self.to_local_time().__cmp__(other.to_local_time())
if comp == 0:
comp = self.to_local_date().get_chronology().__cmp__(other.to_local_date().get_chronology())
return comp
def is_after(self, other):
this_ep_day = self.to_local_date().to_epoch_day()
other_ep_day = other.to_local_date().to_epoch_day()
return this_ep_day > other_ep_day or \
(this_ep_day == other_ep_day and self.to_local_time().to_nano_of_day() > other.to_local_time().to_nano_of_day())
def is_before(self, other):
this_ep_day = self.to_local_date().to_epoch_day()
other_ep_day = other.to_local_date().to_epoch_day()
return this_ep_day < other_ep_day or \
(this_ep_day == other_ep_day and self.to_local_time().to_nano_of_day() < other.to_local_time().to_nano_of_day())
def is_equal(self, other):
return self.to_local_time().to_nano_of_day() == other.to_local_time().to_nano_of_day() and \
self.to_local_date().to_epoch_day() == other.to_local_date().to_epoch_day()
def __eq__(self, other):
if self is other:
return True
if isinstance(other, ChronoLocalDateTime):
return self.__cmp__(other) == 0
return False
def __hash__(self):
return self.to_local_date().__hash__() ^ self.to_local_time().__hash__()
def __str__(self):
return self.to_local_date().__str__() + 'T' + self.to_local_time().__str__()
| [
"tyler.r.bowen@gmail.com"
] | tyler.r.bowen@gmail.com |
3efaeb9b2ba2bb7ab55362bd8a91152e1af3525a | 5d0edf31b17c5375faf6126c1a7be8e79bfe2ab8 | /buildout-cache/eggs/Products.PortalTransforms-2.1.12-py2.7.egg/Products/PortalTransforms/transforms/word_to_html.py | 624225f53b94fa9955067ec501f72cb4da975df2 | [] | no_license | renansfs/Plone_SP | 27cba32ebd9fc03dae3941ec23cf1bf0a7b6667a | 8a7bdbdb98c3f9fc1073c6061cd2d3a0ec80caf5 | refs/heads/master | 2021-01-15T15:32:43.138965 | 2016-08-24T15:30:19 | 2016-08-24T15:30:19 | 65,313,812 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,519 | py | # -*- coding: utf-8 -*-
from Products.PortalTransforms.interfaces import ITransform
from zope.interface import implementer
import os
EXTRACT_BODY = 1
EXTRACT_STYLE = 0
FIX_IMAGES = 1
IMAGE_PREFIX = "img_"
# disable office_uno because it doesn't support multithread yet
ENABLE_UNO = False
if os.name == 'posix':
try:
if ENABLE_UNO:
from office_uno import document
else:
raise
except:
from office_wvware import document
else:
try:
if ENABLE_UNO:
from office_uno import document
else:
raise
except:
from office_com import document
@implementer(ITransform)
class word_to_html(object):
__name__ = "word_to_html"
inputs = ('application/msword',)
output = 'text/html'
output_encoding = 'utf-8'
tranform_engine = document.__module__
def name(self):
return self.__name__
def convert(self, data, cache, **kwargs):
orig_file = 'unknown.doc'
doc = None
try:
doc = document(orig_file, data)
doc.convert()
html = doc.html()
path, images = doc.subObjects(doc.tmpdir)
objects = {}
if images:
doc.fixImages(path, images, objects)
cache.setData(html)
cache.setSubObjects(objects)
return cache
finally:
if doc is not None:
doc.cleanDir(doc.tmpdir)
def register():
return word_to_html()
| [
"renansfs@gmail.com"
] | renansfs@gmail.com |
b7c0556a288c8c26517fd225c1425ce43b08ea25 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/fbs_0212+349/sdB_FBS_0212+349_coadd.py | 85c8c3b353e426ba0c09356dcd02332631d61bb9 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[33.889042,35.189722], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_FBS_0212+349/sdB_FBS_0212+349_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_FBS_0212+349/sdB_FBS_0212+349_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
| [
"thomas@boudreauxmail.com"
] | thomas@boudreauxmail.com |
dd4973b847efb1f0b4a23a8275038669c6b2eba1 | 5686100c4ed0436347107f4e9faae30fca609c09 | /leetcode/1287. Element Appearing More Than 25% In Sorted Array/Solution.py | 52841b8dd96cec695b503398fb1062cf2a443dd5 | [] | no_license | adamqddnh/algorithm-questions | 7d4f56b7e5ac2ff9460774d43ecf8cba2cd7b0cb | 93a1b082e10ade0dd464deb80b5df6c81552f534 | refs/heads/master | 2023-06-29T04:51:26.635740 | 2021-07-23T09:11:45 | 2021-07-23T09:11:45 | 252,675,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 670 | py | class Solution(object):
def findSpecialInteger(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
last = 0
result = 1
length = len(arr)
for i in range(1, length):
if arr[i] == arr[last]:
result = max(result, i - last + 1)
else:
last = i
if result > length / 4:
return arr[last]
return arr[last]
"""
:A new idea
"""
# def findSpecialInteger(self, arr):
# n = len(arr) // 4
# for i in range(len(arr)):
# if arr[i] == arr[i + n]:
# return arr[i]
| [
"noreply@github.com"
] | adamqddnh.noreply@github.com |
1c1affc9dd98fd25a3edc099d6fa7cd07ff3fc6d | d8e8e528b1942b3528c88b12729f0cbc7b7d606f | /pipenv/vendor/dotenv/environ.py | ad3571656fba9a41d98bb9ce8d9e41dbd669291e | [
"MIT",
"BSD-3-Clause"
] | permissive | frostming/pipenv | 997e5f71ac5a4bbac3aacd1fa000da6e0c8161eb | 661184e5ccf9bec3e3b4b03af778e54fe2fbc1a2 | refs/heads/master | 2021-04-15T03:33:03.817473 | 2019-03-18T03:14:33 | 2019-03-18T03:14:33 | 126,263,945 | 1 | 1 | MIT | 2019-03-19T09:44:03 | 2018-03-22T01:48:41 | Python | UTF-8 | Python | false | false | 1,330 | py | import os
class UndefinedValueError(Exception):
pass
class Undefined(object):
"""Class to represent undefined type. """
pass
# Reference instance to represent undefined values
undefined = Undefined()
def _cast_boolean(value):
"""
Helper to convert config values to boolean as ConfigParser do.
"""
_BOOLEANS = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False}
value = str(value)
if value.lower() not in _BOOLEANS:
raise ValueError('Not a boolean: %s' % value)
return _BOOLEANS[value.lower()]
def getenv(option, default=undefined, cast=undefined):
"""
Return the value for option or default if defined.
"""
# We can't avoid __contains__ because value may be empty.
if option in os.environ:
value = os.environ[option]
else:
if isinstance(default, Undefined):
raise UndefinedValueError('{} not found. Declare it as envvar or define a default value.'.format(option))
value = default
if isinstance(cast, Undefined):
return value
if cast is bool:
value = _cast_boolean(value)
elif cast is list:
value = [x for x in value.split(',') if x]
else:
value = cast(value)
return value
| [
"dan@danryan.co"
] | dan@danryan.co |
4810fd2a4938fc0e455b6386bfde2aea67b9b938 | e573b586a921084f29a36f8e2de5afcae2c65ff8 | /tasks/part_3/mod_checker.py | e2b4f8d96c27b81ca4cbe1eb745fc2bc267870ef | [] | no_license | HannaKulba/AdaptiveTraining_English | e69c8a0c444c1fa72b4783ba837cb3d9dc055d91 | 46497dc6827df37f4ebb69671912ef5b934ab6f0 | refs/heads/master | 2020-12-28T15:05:25.762072 | 2020-02-19T14:39:22 | 2020-02-19T14:39:22 | 238,381,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | def mod_checker(x, mod=0):
return lambda y: y % x == mod if True else False
if __name__ == '__main__':
mod_3 = mod_checker(3)
print(mod_3(3)) # True
print(mod_3(4)) # False
mod_3_1 = mod_checker(3, 1)
print(mod_3_1(4)) # True
| [
"anna.mirraza@gmail.com"
] | anna.mirraza@gmail.com |
e7b20be2f99f8e0b7bbe52395f49c97165efe8e8 | f6c7dfc1c77c1e6b2460982925d76ee10f9eaa52 | /media.py | febe922488f1f760cf32c511af681171b211bae3 | [] | no_license | kevinzhuang10/Movie_Trailer_Website | f8bd104688f14c37a72b238b4497acbafde3c166 | e4e61efb27f43faf0cc2acab6ea8e95037333486 | refs/heads/master | 2021-01-22T04:18:14.908976 | 2017-02-13T05:07:28 | 2017-02-13T05:07:28 | 81,530,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # Lesson 3.4: Make Classes
# Mini-Project: Movies Website
# In this file, you will define the class Movie. You could do this
# directly in entertainment_center.py but many developers keep their
# class definitions separate from the rest of their code. This also
# gives you practice importing Python files.
import webbrowser
class Movie():
# This class provides a way to store movie related information
'''class Movie takes input of title, image url and video url;
outputs Movie instance'''
def __init__(self, movie_title, movie_poster, movie_trailer):
# initialize instance of class Movie
print('Movie class constructor called')
self.title = movie_title
self.poster_image_url = movie_poster
self.trailer_youtube_url = movie_trailer
| [
"you@example.com"
] | you@example.com |
b966eccf30fa3b858d44755b73fe4bc3cff62c15 | 9a2726744fb12ebcbfd636a05fbd4cd36abfba7a | /config/settings/partials/INSTALLED_APPS.py | 32954a83dd7b1af16b8d0ece97f3628b518b2dc8 | [] | no_license | bartkim0426/drf_project_base | 7d34749269954022a11dfcfba99bc38e48a31ede | 7342c49fbf6da7a0fd5d22a6bfb172d2212d4b91 | refs/heads/master | 2021-08-22T19:32:30.522254 | 2017-12-01T03:00:11 | 2017-12-01T03:00:11 | 112,159,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | ADMIN_APPS = [
# for admin
'filer',
'suit',
'easy_thumbnails',
]
DJANGO_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
THIRD_PARTY_APPS = [
'rest_framework',
]
LOCAL_APPS = [
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
INSTALLED_APPS = ADMIN_APPS + DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
| [
"bartkim0426@gmail.com"
] | bartkim0426@gmail.com |
f17e721848f3121c7f9530cc5ef759327dfb25d7 | 6bb6c9f37e61325671a72e4c0b7bf1e6ec062aac | /testinfra/plugin.py | 220ddd6dbf9614e2b6754d01260a738d0a7c8897 | [
"Apache-2.0"
] | permissive | Unity-Technologies/testinfra | 19ef3aadddef7322c66978b6b9576f992fd16a81 | 58fa564f0a3178e9df26b26e2f7cb54aad433d3f | refs/heads/master | 2021-01-19T21:58:21.534575 | 2017-04-27T08:16:54 | 2017-04-27T08:16:54 | 88,728,308 | 1 | 3 | null | 2017-04-19T09:45:41 | 2017-04-19T09:45:41 | null | UTF-8 | Python | false | false | 3,825 | py | # coding: utf-8
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=redefined-outer-name
from __future__ import unicode_literals
import logging
import sys
import pytest
import testinfra
import testinfra.modules
def _generate_fixtures():
self = sys.modules[__name__]
for modname in testinfra.modules.modules:
def get_fixture(name):
@pytest.fixture()
def f(TestinfraBackend):
return TestinfraBackend.get_module(name)
f.__name__ = str(name)
f.__doc__ = ('https://testinfra.readthedocs.io/en/latest/'
'modules.html#{0}'.format(name.lower()))
return f
setattr(self, modname, get_fixture(modname))
_generate_fixtures()
@pytest.fixture()
def LocalCommand(TestinfraBackend):
"""Run commands locally
Same as `Command` but run commands locally with subprocess even
when the connection backend is not "local".
Note: `LocalCommand` does NOT respect ``--sudo`` option
"""
return testinfra.get_backend("local://").get_module("Command")
@pytest.fixture(scope="module")
def TestinfraBackend(_testinfra_backend):
return _testinfra_backend
def pytest_addoption(parser):
group = parser.getgroup("testinfra")
group.addoption(
"--connection",
action="store",
dest="connection",
help=(
"Remote connection backend (paramiko, ssh, safe-ssh, "
"salt, docker, ansible)"
)
)
group.addoption(
"--hosts",
action="store",
dest="hosts",
help="Hosts list (comma separated)",
)
group.addoption(
"--ssh-config",
action="store",
dest="ssh_config",
help="SSH config file",
)
group.addoption(
"--sudo",
action="store_true",
dest="sudo",
help="Use sudo",
)
group.addoption(
"--sudo-user",
action="store",
dest="sudo_user",
help="sudo user",
)
group.addoption(
"--ansible-inventory",
action="store",
dest="ansible_inventory",
help="Ansible inventory file",
)
group.addoption(
"--nagios",
action="store_true",
dest="nagios",
help="Nagios plugin",
)
def pytest_generate_tests(metafunc):
if "_testinfra_backend" in metafunc.fixturenames:
if metafunc.config.option.hosts is not None:
hosts = metafunc.config.option.hosts.split(",")
elif hasattr(metafunc.module, "testinfra_hosts"):
hosts = metafunc.module.testinfra_hosts
else:
hosts = [None]
params = testinfra.get_backends(
hosts,
connection=metafunc.config.option.connection,
ssh_config=metafunc.config.option.ssh_config,
sudo=metafunc.config.option.sudo,
sudo_user=metafunc.config.option.sudo_user,
ansible_inventory=metafunc.config.option.ansible_inventory,
)
ids = [e.get_pytest_id() for e in params]
metafunc.parametrize(
"_testinfra_backend", params, ids=ids, scope="module")
def pytest_configure(config):
if config.option.verbose > 1:
logging.basicConfig()
logging.getLogger("testinfra").setLevel(logging.DEBUG)
| [
"phil@philpep.org"
] | phil@philpep.org |
e2612c209edccfef435399a9d964423c08face7f | 98bf8263027c3cc6673cc4bd6189860092848bcd | /Livid_DS1_v2_x3/__init__.py | 9eb792ede46228249093840ab9c5064342e48242 | [] | no_license | aumhaa/aumhaa_commission | 09ae898ee5907721236ed2f97f76383ee8e25ae3 | 07ca157ae04e082059c583e4a8030cb73974e9e6 | refs/heads/master | 2022-10-09T11:40:31.332879 | 2022-09-11T03:32:43 | 2022-09-11T03:32:43 | 146,852,967 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 697 | py | # by amounra 0413 : http://www.aumhaa.com
from .DS1 import DS1
from _Framework.Capabilities import controller_id, inport, outport, CONTROLLER_ID_KEY, PORTS_KEY, HIDDEN, NOTES_CC, SCRIPT, REMOTE, SYNC, TYPE_KEY, FIRMWARE_KEY, AUTO_LOAD_KEY
def get_capabilities():
return {CONTROLLER_ID_KEY: controller_id(vendor_id=2536, product_ids=[115], model_name='Livid Instruments DS1'),
PORTS_KEY: [inport(props=[HIDDEN, NOTES_CC, SCRIPT, REMOTE]),
inport(props = []),
outport(props=[HIDDEN, NOTES_CC, SCRIPT, REMOTE]),
outport(props=[])],
TYPE_KEY: 'push',
AUTO_LOAD_KEY: False}
def create_instance(c_instance):
""" Creates and returns the DS1 script """
return DS1(c_instance)
| [
"aumhaa@gmail.com"
] | aumhaa@gmail.com |
5b1582e6cbdeeb188eb19d5c2349313222a6ba83 | da9b9f75a693d17102be45b88efc212ca6da4085 | /sdk/cosmos/azure-cosmos/azure/cosmos/diagnostics.py | 352a5a2f6b01787897e53ff55b1297a1adc2a5ce | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | elraikhm/azure-sdk-for-python | e1f57b2b4d8cc196fb04eb83d81022f50ff63db7 | dcb6fdd18b0d8e0f1d7b34fdf82b27a90ee8eafc | refs/heads/master | 2021-06-21T22:01:37.063647 | 2021-05-21T23:43:56 | 2021-05-21T23:43:56 | 216,855,069 | 0 | 0 | MIT | 2019-10-22T16:05:03 | 2019-10-22T16:05:02 | null | UTF-8 | Python | false | false | 2,688 | py | # The MIT License (MIT)
# Copyright (c) 2014 Microsoft Corporation
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Diagnostic tools for Cosmos
"""
from requests.structures import CaseInsensitiveDict
class RecordDiagnostics(object):
""" Record Response headers from Cosmos read operations.
The full response headers are stored in the ``headers`` property.
Examples:
>>> rh = RecordDiagnostics()
>>> col = b.create_container(
... id="some_container",
... partition_key=PartitionKey(path='/id', kind='Hash'),
... response_hook=rh)
>>> rh.headers['x-ms-activity-id']
'6243eeed-f06a-413d-b913-dcf8122d0642'
"""
_common = {
"x-ms-activity-id",
"x-ms-session-token",
"x-ms-item-count",
"x-ms-request-quota",
"x-ms-resource-usage",
"x-ms-retry-after-ms",
}
def __init__(self):
self._headers = CaseInsensitiveDict()
self._body = None
self._request_charge = 0
@property
def headers(self):
return CaseInsensitiveDict(self._headers)
@property
def body(self):
return self._body
@property
def request_charge(self):
return self._request_charge
def clear(self):
self._request_charge = 0
def __call__(self, headers, body):
self._headers = headers
self._body = body
self._request_charge += float(headers.get("x-ms-request-charge", 0))
def __getattr__(self, name):
key = "x-ms-" + name.replace("_", "-")
if key in self._common:
return self._headers[key]
raise AttributeError(name)
| [
"noreply@github.com"
] | elraikhm.noreply@github.com |
56aab3e9af3521f8ce8e38b522dbe37ecc3f5983 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-mpc/huaweicloudsdkmpc/v1/model/delete_animated_graphics_task_response.py | 66b1a3a8204409391bfea46679c083d766c979f4 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 2,487 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DeleteAnimatedGraphicsTaskResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
}
attribute_map = {
}
def __init__(self):
"""DeleteAnimatedGraphicsTaskResponse
The model defined in huaweicloud sdk
"""
super(DeleteAnimatedGraphicsTaskResponse, self).__init__()
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DeleteAnimatedGraphicsTaskResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
fc7c905d94a3f8b1e6d92e5841566ba7f9041943 | 228ba698591ce49a0f220bf5acfc4bad67003172 | /weibo/pycharmspace/task/bonusRecognize/BonusRecognize.py | 8cd2b4009f00c9723bb507127f4c937d3176c08c | [] | no_license | HaxByCzy/python | 29e41ef95d713d26895c6f6611541c6487251756 | 3762fed0ea8a9ca63bfdfd70f8a2b62a15179ac0 | refs/heads/master | 2020-03-26T18:36:41.227656 | 2018-05-29T07:30:34 | 2018-05-29T07:30:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,943 | py | #-*- coding:utf-8 _*-
"""
--------------------------------------------------------------------
@function: 红包微博识别
@time: 2017-07-13
author:baoquan3
@version:
@modify:
--------------------------------------------------------------------
"""
import sys
from WeiboParser import WeiboParser
defaultencoding = 'utf-8'
if sys.getdefaultencoding() != defaultencoding:
reload(sys)
sys.setdefaultencoding(defaultencoding)
class Weibo(object):
"""
微博特征实例bean
"""
def __init__(self):
self.id = "" #微博id
self.url = "" #用户粉丝数
self.content = "" #微博内容
self.filter = "" #原创filter字段
class Bonus(WeiboParser):
"""
发红包微博识别
"""
id = "ID"
url = "URL"
content = "CONTENT"
filter = "FILTER"
titileTag = "#"
def processWeibo(self):
"""
根据一条微博相关特征,识别是否是属于发红包微博
:return:
"""
if len(self.tmpDict) > 0:
wb = Weibo()
if Bonus.id in self.tmpDict:
wb.id = self.tmpDict[Bonus.id]
if Bonus.url in self.tmpDict:
wb.url = self.tmpDict[Bonus.url]
if Bonus.content in self.tmpDict:
wb.content = self.tmpDict[Bonus.content]
if Bonus.filter in self.tmpDict:
wb.filter = self.tmpDict[Bonus.filter]
status = self.isBonus(wb)
if status:
self.outputReadWeibo(wb)
self.tmpDict.clear()
self.keyList = []
def isBonus(self, wb):
"""
根据微博特征,判断是否是发红包用户
:param wb: 微博特征
:return: true or false
"""
keyword = True if wb.content.find("红包") != -1 and wb.content.find("我") != -1 else False
if keyword:
hyperlink = self.hasHyperlink(wb.content)
if hyperlink:
title = self.hasTitle(wb.content)
if title:
orgin = False if int(wb.filter) & 4 != 0 else True
if orgin:
length = True if len(wb.content) > 10 else False
if length:
return True
else:
False
else:
return False
else:
return False
else:
return False
else:
return False
def hasTitle(self, line):
"""
根据微博内容判断是否包含话题
:param line: 微博内容
:return: true or false
"""
prefixIndex = line.find(Bonus.titileTag)
if prefixIndex != -1:
suffixIndex = line.find(Bonus.titileTag, prefixIndex + 1)
if suffixIndex != -1:
return True
else:
return False
def hasHyperlink(self, line):
"""
判断微博内容是否包含超链接
:param line: 微博内容
:return: true or false
"""
prefixIndex = line.find("<sina:link src=")
if prefixIndex != -1:
suffixIndex = line.find("/>", prefixIndex + 1)
if suffixIndex != -1:
return True
else:
return False
def outputReadWeibo(self, wb):
"""
输出已识别出发红包用户微博,以方便阅读输出格式输出
:param wb: 微博特征
:return:
"""
print "{0}\t{1}\t{2}\t{3}".format(wb.id,wb.filter, wb.url, wb.content)
if __name__ == "__main__":
inFile = "D://data//wbTest.dat"
bonus = Bonus()
with open(inFile,"r") as f:
for line in f:
bonus.processOneRecord(line.strip())
bonus.flush() | [
"tszhbaoquan@163.com"
] | tszhbaoquan@163.com |
c4150bd16ec216344b75814082beff1bc17cdcc2 | d93159d0784fc489a5066d3ee592e6c9563b228b | /RecoLocalCalo/HcalRecProducers/python/HFUpgradeReconstructor_cfi.py | 41371cc1ba7909ad4fc48d8759d1f0f795097d96 | [] | permissive | simonecid/cmssw | 86396e31d41a003a179690f8c322e82e250e33b2 | 2559fdc9545b2c7e337f5113b231025106dd22ab | refs/heads/CAallInOne_81X | 2021-08-15T23:25:02.901905 | 2016-09-13T08:10:20 | 2016-09-13T08:53:42 | 176,462,898 | 0 | 1 | Apache-2.0 | 2019-03-19T08:30:28 | 2019-03-19T08:30:24 | null | UTF-8 | Python | false | false | 500 | py | import FWCore.ParameterSet.Config as cms
hfUpgradeReco = cms.EDProducer("HcalSimpleReconstructor",
correctionPhaseNS = cms.double(0.0),
digiLabel = cms.InputTag("simHcalUnsuppressedDigis","HFUpgradeDigiCollection"),
Subdetector = cms.string('upgradeHF'),
correctForPhaseContainment = cms.bool(False),
correctForTimeslew = cms.bool(False),
dropZSmarkedPassed = cms.bool(True),
firstSample = cms.int32(2),
samplesToAdd = cms.int32(1),
tsFromDB = cms.bool(True)
)
| [
"giulio.eulisse@gmail.com"
] | giulio.eulisse@gmail.com |
87bc3f1a5823b448754c8dc4c28a12e411d905c5 | 3e7a8c5630de986a4d02011b1bd368c041f3f477 | /作图/利用pyecharts制作柱状图.py | 4748a756419e67069fd49d9794be5896276551f3 | [] | no_license | gswyhq/hello-world | b9ef715f80d2b39c8efaa1aa2eb18a6257e26218 | b1ab053a05e1f8c63b8b04d6904a3cdca450bd9f | refs/heads/master | 2023-05-26T13:15:36.788620 | 2023-05-19T13:38:50 | 2023-05-19T13:38:50 | 158,821,148 | 16 | 6 | null | 2021-03-19T02:59:48 | 2018-11-23T11:04:43 | Python | UTF-8 | Python | false | false | 2,317 | py | #!/usr/bin/python3
# coding: utf-8
自从 v0.3.2 开始,为了缩减项目本身的体积以及维持 pyecharts 项目的轻量化运行,pyecharts 将不再自带地图 js 文件。如用户需要用到地图图表,可自行安装对应的地图文件包。下面介绍如何安装。
全球国家地图: echarts-countries-pypkg (1.9MB): 世界地图和 213 个国家,包括中国地图
中国省级地图: echarts-china-provinces-pypkg (730KB):23 个省,5 个自治区
中国市级地图: echarts-china-cities-pypkg (3.8MB):370 个中国城市
中国县区级地图: echarts-china-counties-pypkg (4.1MB):2882 个中国县·区
中国区域地图: echarts-china-misc-pypkg (148KB):11 个中国区域地图,比如华南、华北。
选择自己需要的安装
pip3 install echarts-countries-pypkg
pip3 install echarts-china-provinces-pypkg
pip3 install echarts-china-cities-pypkg
pip3 install echarts-china-counties-pypkg
pip3 install echarts-china-misc-pypkg
pip3 install echarts-united-kingdom-pypkg
from pyecharts.charts import Bar
from pyecharts import options as opts
# 生成 HTML
bar = (
Bar()
.add_xaxis(["衬衫", "毛衣", "领带", "裤子", "风衣", "高跟鞋", "袜子"])
.add_yaxis("商家A", [114, 55, 27, 101, 125, 27, 105])
.add_yaxis("商家B", [57, 134, 137, 129, 145, 60, 49])
.set_global_opts(title_opts=opts.TitleOpts(title="某商场销售情况"))
)
bar.render(path="render2.html")
# 生成图片
from snapshot_selenium import snapshot as driver
from pyecharts import options as opts
from pyecharts.charts import Bar
from pyecharts.render import make_snapshot
def bar_chart() -> Bar:
c = (
Bar()
.add_xaxis(["衬衫", "毛衣", "领带", "裤子", "风衣", "高跟鞋", "袜子"])
.add_yaxis("商家A", [114, 55, 27, 101, 125, 27, 105])
.add_yaxis("商家B", [57, 134, 137, 129, 145, 60, 49])
.reversal_axis()
.set_series_opts(label_opts=opts.LabelOpts(position="right"))
.set_global_opts(title_opts=opts.TitleOpts(title="Bar-测试渲染图片"))
)
return c
# 需要安装 snapshot_selenium; 当然也可以使用: snapshot_phantomjs
make_snapshot(driver, bar_chart().render(path="render2.html"), "bar.png")
def main():
pass
if __name__ == '__main__':
main()
| [
"gswyhq@126.com"
] | gswyhq@126.com |
aa30c3ca06c891c085bb4fa75f45ef0510c91188 | 0c787f10f5fc666e275e01954f430cb894f1fa65 | /__init__.py | 30e33671ae1fa68e0ba341441138f3fd7beb1a7b | [] | no_license | marcinowski/web-archive | db391c32c2cf05bbcee1abee2825b8bb1295f86f | ea590e6699d97c2554e15e2c7598fb8b30a3f292 | refs/heads/master | 2021-01-20T02:31:25.147941 | 2017-04-26T23:17:21 | 2017-04-26T23:17:21 | 89,422,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,006 | py | """
:created on: 2017-04-24
:author: Marcin Muszynski
:contact: marcinowski007@gmail.com
UseCase: pass url and a path -> generate one compiled html with css classes
"""
import requests
from lxml import etree
from urllib.parse import urlparse
class WebArchiveException(Exception):
""""""
class WebArchive(object):
def __init__(self, url, path):
self.url = url
self.path = path
self.session = requests.Session()
self.html = self._get_content(self.url)
self.tree = etree.HTML(self.html)
self.url_parsed = urlparse(self.url)
self.result_html = ''
self.css_styles = ''
self.js_scripts = ''
def save(self):
self._handle_links()
self._handle_styles()
self._handle_scripts()
self._insert_elements()
self._save_file()
def _handle_links(self):
for link in self.tree.iter(tag='link'):
if link.get('rel') == 'stylesheet':
url = self._resolve_url(link.get('href'))
self.css_styles += self._get_content(url)
link.getparent().remove(link)
def _handle_styles(self):
for style in self.tree.iter(tag='style'):
self.css_styles += style.text or ''
style.getparent().remove(style)
def _handle_scripts(self):
for script in self.tree.iter(tag='script'):
src = script.get('src')
if not src:
self.js_scripts += script.text or ''
else:
url = self._resolve_url(src)
self.js_scripts += self._get_content(url)
script.getparent().remove(script)
def _insert_elements(self):
self._insert_element('style', self.css_styles)
self._insert_element('script', self.js_scripts)
def _insert_element(self, _tag, content):
tag = etree.Element(_tag)
tag.text = content
self.tree.find('head').append(tag)
def _save_file(self):
with open(self.path, 'wb') as f:
f.write(etree.tostring(self.tree))
def _resolve_url(self, url):
if url.startswith('http'): # absolute url
return url
if url.startswith('//'): # schema relative
return self.url_parsed.schema + ":" + url
if url.starstwith('/'): # relative url
page_root = self.url_parsed.schema + self.url_parsed.netloc
return page_root + url
bwd_path = url.count('../')
base = list(self.tree.iter(tag='base')) # 0 or 1
base_url = base[0].get('href') if base else self.url
return base_url.rsplit('/', bwd_path)[0] + '/' + url
def _get_content(self, url):
try:
return self._get_response(url)
except requests.RequestException:
raise WebArchiveException("Message")
except AttributeError:
raise WebArchiveException("Message")
def _get_response(self, url):
return self.session.get(url).content.decode()
| [
"muszynskimarcin@wp.pl"
] | muszynskimarcin@wp.pl |
67f38c3e694291ed8a8e71da84c20b48d5d4aa3a | 6543b77d0f483be2eb57b4e5e94928c6bff8eed9 | /venv/Scripts/pip3-script.py | bccaaa75db6144c30e115e29250c87f76933bbac | [] | no_license | aswini1414/sampleproject | 0876daaea3195f0a2245a66a4f023927ffdbb433 | 5e225ac796bf5508d2c2c646ed94a764b021fce8 | refs/heads/master | 2020-06-18T14:50:19.580453 | 2019-07-08T04:13:48 | 2019-07-08T04:13:48 | 196,337,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | #!C:\Users\LOKESH\PycharmProjects\sampleproject\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==9.0.1','console_scripts','pip3'
__requires__ = 'pip==9.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==9.0.1', 'console_scripts', 'pip3')()
)
| [
"lokesh.rapala"
] | lokesh.rapala |
5b24b27095e2a964135f369dc7b99aa2c27fa227 | cca3f6a0accb18760bb134558fea98bb87a74806 | /abc150/C/main.py | d1f234e48a0bc6c9180201d774917dc7eedfb1d2 | [] | no_license | Hashizu/atcoder_work | 5ec48cc1147535f8b9d0f0455fd110536d9f27ea | cda1d9ac0fcd56697ee5db93d26602dd8ccee9df | refs/heads/master | 2023-07-15T02:22:31.995451 | 2021-09-03T12:10:57 | 2021-09-03T12:10:57 | 382,987,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | #!/usr/bin/env python3
import sys
import math
def solve(N: int, P: "List[int]", Q: "List[int]"):
cnta = 0
cntb = 0
for i, x in enumerate(P):
p_ = x-1
for y in range(i):
if P[y] < x : p_-=1
cnta += p_ * math.factorial(N - 1 - i)
for i, x in enumerate(Q):
p_ = x-1
for y in range(i):
if Q[y] < x : p_-=1
cntb += p_ * math.factorial(N - 1 - i)
# print(cnta, cntb)
print(abs(cnta-cntb))
return
# Generated by 1.1.6 https://github.com/kyuridenamida/atcoder-tools (tips: You use the default template now. You can remove this line by using your custom template)
def main():
def iterate_tokens():
for line in sys.stdin:
for word in line.split():
yield word
tokens = iterate_tokens()
N = int(next(tokens)) # type: int
P = [int(next(tokens)) for _ in range(N)] # type: "List[int]"
Q = [int(next(tokens)) for _ in range(N)] # type: "List[int]"
solve(N, P, Q)
if __name__ == '__main__':
main()
| [
"athenenoctus@gmail.com"
] | athenenoctus@gmail.com |
2b22230e217dc0842401a44331e89e1f83afc119 | ed08464df91da89363f7ae4011a5418db9769767 | /official courses/month01/day03/exercise02.py | 9cdb6265ab935680fe945a807bbce2a5e1be649a | [] | no_license | lhd0320/AID2003 | ece8399d137226b699e9a3606cca2dc8e3b09bab | 9b572a5039e68a1d21b41edf3be5e6e42c098cd6 | refs/heads/master | 2022-08-19T05:21:56.175803 | 2020-05-25T03:10:46 | 2020-05-25T03:10:46 | 266,663,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | year = int(input("请输入年份:"))
print(year % 4 == 0 and year % 100 != 0 or year % 400 == 0)
| [
"706440631@qq.com"
] | 706440631@qq.com |
eb4cf0a9a96c1f9e3d5610478b4b25452ee384db | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/gui/Scaleform/locale/NATIONS.py | 342d5f81bc76cdde4e0867e06d1e5cc393033665 | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,147 | py | # 2017.05.04 15:25:08 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/locale/NATIONS.py
"""
This file was generated using the wgpygen.
Please, don't edit this file manually.
"""
from debug_utils import LOG_WARNING
class NATIONS(object):
USSR = '#nations:ussr'
GERMANY = '#nations:germany'
USA = '#nations:usa'
FRANCE = '#nations:france'
UK = '#nations:uk'
JAPAN = '#nations:japan'
CZECH = '#nations:czech'
CHINA = '#nations:china'
SWEDEN = '#nations:sweden'
ALL_ENUM = (USSR,
GERMANY,
USA,
FRANCE,
UK,
JAPAN,
CZECH,
CHINA,
SWEDEN)
@classmethod
def all(cls, key0):
outcome = '#nations:{}'.format(key0)
if outcome not in cls.ALL_ENUM:
LOG_WARNING('Localization key "{}" not found'.format(outcome))
return None
else:
return outcome
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\locale\NATIONS.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:25:08 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
fcecd995ff300d8a765f6450f92bc9160b5af1bf | 4f0beffcf200dd0b42cc192a4a2b9c2d0f0a2a42 | /rewrite/terms.py | e8441a0d6416715bc03c0303ba9a3a0f8c34b926 | [
"BSD-2-Clause"
] | permissive | rebcabin/pyrewrite | d11a2385920a8b375e9f3f7a4cc81ed8a178cf28 | 226d139ff361b095d46a58c65bc345cd18443008 | refs/heads/master | 2020-03-26T00:09:40.835150 | 2013-02-13T16:46:24 | 2013-02-13T16:46:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,345 | py | #------------------------------------------------------------------------
# Terms
#------------------------------------------------------------------------
class ATerm(object):
def __init__(self, term, annotation=None):
self.term = term
self.annotation = annotation
def __str__(self):
if self.annotation is not None:
return str(self.term) + arepr([self.annotation], '{', '}')
else:
return str(self.term)
def __eq__(self, other):
if isinstance(other, ATerm):
return self.term == other.term
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
def __show__(self):
pass
class AAppl(object):
def __init__(self, spine, args):
assert isinstance(spine, ATerm)
self.spine = spine
self.args = args
def __eq__(self, other):
if isinstance(other, AAppl):
return self.spine == other.spine and self.args == other.args
else:
return False
def __ne__(self, other):
return not self == other
def __str__(self):
return str(self.spine) + arepr(self.args, '(', ')')
def __repr__(self):
return str(self)
def __show__(self):
pass
class AString(object):
def __init__(self, val):
assert isinstance(val, str)
self.val = val
def __str__(self):
return '"%s"' % (self.val)
def __repr__(self):
return str(self)
class AInt(object):
def __init__(self, val):
self.val = val
def __str__(self):
return str(self.val)
def __eq__(self, other):
if isinstance(other, AInt):
return self.val == other.val
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
class AReal(object):
def __init__(self, val):
self.val = val
def __str__(self):
return str(self.val)
def __eq__(self, other):
if isinstance(other, AReal):
return self.val == other.val
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
class AList(object):
def __init__(self, args):
assert isinstance(args, list)
self.args = args or []
def __str__(self):
return arepr(self.args, '[', ']')
def __eq__(self, other):
if isinstance(other, AList):
return self.args == other.args
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
class ATuple(object):
def __init__(self, args):
assert isinstance(args, list)
self.args = args or []
def __eq__(self, other):
if isinstance(other, ATuple):
return self.args == other.args
else:
return False
def __ne__(self, other):
return not self == other
def __str__(self):
return arepr(self.args, '(', ')')
def __repr__(self):
return str(self)
class APlaceholder(object):
def __init__(self, type, args):
self.type = type
self.args = args
def __str__(self):
if self.args is not None:
return '<%s(%r)>' % (self.type, self.args)
else:
return arepr([self.type], '<', '>')
def __repr__(self):
return str(self)
#------------------------------------------------------------------------
# Pretty Printing
#------------------------------------------------------------------------
def arepr(terms, l, r):
""" Concatenate str representations with commas and left and right
characters. """
return l + ', '.join(map(str, terms)) + r
#------------------------------------------------------------------------
# Compatability
#------------------------------------------------------------------------
# Goal is to only define the protocol that these need to conform
# to and then allow pluggable backends.
# - ATerm
# - Python AST
# - SymPy
aterm = ATerm
aappl = AAppl
aint = AInt
astr = AString
areal = AReal
atupl = ATuple
alist = AList
aplaceholder = APlaceholder
| [
"stephen.m.diehl@gmail.com"
] | stephen.m.diehl@gmail.com |
281a87d35d5eb6fe57c64267ba9cbeafccc7c2ca | 35573eebb47cf243f04e1125725920b252de6261 | /python/ray/data/datasource/parquet_datasource.py | f47bf3be81ea1988c39df7590fe5731bdf660123 | [
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | timgates42/ray | 05df4fe47a16a028c92fea106c4a2b316665001d | cd95569b014cf06479b96875c63eba171e92ec97 | refs/heads/master | 2023-03-18T00:59:27.757929 | 2022-07-13T21:57:24 | 2022-07-13T21:57:24 | 227,523,984 | 0 | 0 | Apache-2.0 | 2019-12-12T05:01:58 | 2019-12-12T05:01:57 | null | UTF-8 | Python | false | false | 11,986 | py | import itertools
import logging
from typing import TYPE_CHECKING, Callable, Iterator, List, Optional, Union
import numpy as np
from ray.data._internal.output_buffer import BlockOutputBuffer
from ray.data._internal.progress_bar import ProgressBar
from ray.data._internal.remote_fn import cached_remote_fn
from ray.data._internal.util import _check_pyarrow_version
from ray.data.block import Block
from ray.data.context import DatasetContext
from ray.data.datasource.datasource import Reader, ReadTask
from ray.data.datasource.file_based_datasource import _resolve_paths_and_filesystem
from ray.data.datasource.file_meta_provider import (
DefaultParquetMetadataProvider,
ParquetMetadataProvider,
)
from ray.data.datasource.parquet_base_datasource import ParquetBaseDatasource
from ray.types import ObjectRef
from ray.util.annotations import PublicAPI
import ray.cloudpickle as cloudpickle
if TYPE_CHECKING:
import pyarrow
from pyarrow.dataset import ParquetFileFragment
logger = logging.getLogger(__name__)
PIECES_PER_META_FETCH = 6
PARALLELIZE_META_FETCH_THRESHOLD = 24
# The number of rows to read per batch. This is sized to generate 10MiB batches
# for rows about 1KiB in size.
PARQUET_READER_ROW_BATCH_SIZE = 100000
FILE_READING_RETRY = 8
# TODO(ekl) this is a workaround for a pyarrow serialization bug, where serializing a
# raw pyarrow file fragment causes S3 network calls.
class _SerializedPiece:
def __init__(self, frag: "ParquetFileFragment"):
self._data = cloudpickle.dumps(
(frag.format, frag.path, frag.filesystem, frag.partition_expression)
)
def deserialize(self) -> "ParquetFileFragment":
# Implicitly trigger S3 subsystem initialization by importing
# pyarrow.fs.
import pyarrow.fs # noqa: F401
(file_format, path, filesystem, partition_expression) = cloudpickle.loads(
self._data
)
return file_format.make_fragment(path, filesystem, partition_expression)
# Visible for test mocking.
def _deserialize_pieces(
serialized_pieces: List[_SerializedPiece],
) -> List["pyarrow._dataset.ParquetFileFragment"]:
return [p.deserialize() for p in serialized_pieces]
# This retry helps when the upstream datasource is not able to handle
# overloaded read request or failed with some retriable failures.
# For example when reading data from HA hdfs service, hdfs might
# lose connection for some unknown reason expecially when
# simutaneously running many hyper parameter tuning jobs
# with ray.data parallelism setting at high value like the default 200
# Such connection failure can be restored with some waiting and retry.
def _deserialize_pieces_with_retry(
serialized_pieces: List[_SerializedPiece],
) -> List["pyarrow._dataset.ParquetFileFragment"]:
min_interval = 0
final_exception = None
for i in range(FILE_READING_RETRY):
try:
return _deserialize_pieces(serialized_pieces)
except Exception as e:
import random
import time
retry_timing = (
""
if i == FILE_READING_RETRY - 1
else (f"Retry after {min_interval} sec. ")
)
log_only_show_in_1st_retry = (
""
if i
else (
f"If earlier read attempt threw certain Exception"
f", it may or may not be an issue depends on these retries "
f"succeed or not. serialized_pieces:{serialized_pieces}"
)
)
logger.exception(
f"{i + 1}th attempt to deserialize ParquetFileFragment failed. "
f"{retry_timing}"
f"{log_only_show_in_1st_retry}"
)
if not min_interval:
# to make retries of different process hit hdfs server
# at slightly different time
min_interval = 1 + random.random()
# exponential backoff at
# 1, 2, 4, 8, 16, 32, 64
time.sleep(min_interval)
min_interval = min_interval * 2
final_exception = e
raise final_exception
@PublicAPI
class ParquetDatasource(ParquetBaseDatasource):
"""Parquet datasource, for reading and writing Parquet files.
The primary difference from ParquetBaseDatasource is that this uses
PyArrow's `ParquetDataset` abstraction for dataset reads, and thus offers
automatic Arrow dataset schema inference and row count collection at the
cost of some potential performance and/or compatibility penalties.
Examples:
>>> import ray
>>> from ray.data.datasource import ParquetDatasource
>>> source = ParquetDatasource() # doctest: +SKIP
>>> ray.data.read_datasource( # doctest: +SKIP
... source, paths="/path/to/dir").take()
[{"a": 1, "b": "foo"}, ...]
"""
def create_reader(self, **kwargs):
return _ParquetDatasourceReader(**kwargs)
class _ParquetDatasourceReader(Reader):
    """Reader that plans parquet read tasks via pyarrow's ParquetDataset."""

    def __init__(
        self,
        paths: Union[str, List[str]],
        filesystem: Optional["pyarrow.fs.FileSystem"] = None,
        columns: Optional[List[str]] = None,
        schema: Optional[Union[type, "pyarrow.lib.Schema"]] = None,
        meta_provider: ParquetMetadataProvider = DefaultParquetMetadataProvider(),
        _block_udf: Optional[Callable[[Block], Block]] = None,
        **reader_args,
    ):
        _check_pyarrow_version()
        import pyarrow as pa
        import pyarrow.parquet as pq

        paths, filesystem = _resolve_paths_and_filesystem(paths, filesystem)
        # A single path is unwrapped to a scalar before handing it to pyarrow.
        if len(paths) == 1:
            paths = paths[0]

        dataset_kwargs = reader_args.pop("dataset_kwargs", {})
        pq_ds = pq.ParquetDataset(
            paths, **dataset_kwargs, filesystem=filesystem, use_legacy_dataset=False
        )
        if schema is None:
            schema = pq_ds.schema
        # Column projection: narrow the schema to the requested columns while
        # keeping the original schema-level metadata.
        if columns:
            schema = pa.schema(
                [schema.field(column) for column in columns], schema.metadata
            )

        if _block_udf is not None:
            # Try to infer dataset schema by passing dummy table through UDF.
            dummy_table = schema.empty_table()
            try:
                inferred_schema = _block_udf(dummy_table).schema
                inferred_schema = inferred_schema.with_metadata(schema.metadata)
            except Exception:
                # Best effort only: fall back to the (projected) input schema.
                logger.debug(
                    "Failed to infer schema of dataset by passing dummy table "
                    "through UDF due to the following exception:",
                    exc_info=True,
                )
                inferred_schema = schema
        else:
            inferred_schema = schema

        self._metadata = meta_provider.prefetch_file_metadata(pq_ds.pieces) or []
        self._pq_ds = pq_ds
        self._meta_provider = meta_provider
        self._inferred_schema = inferred_schema
        self._block_udf = _block_udf
        self._reader_args = reader_args
        self._columns = columns
        self._schema = schema

    def estimate_inmemory_data_size(self) -> Optional[int]:
        # TODO(ekl) better estimate the in-memory size here.
        # Serialized (on-disk) size scaled by an assumed decompression ratio.
        PARQUET_DECOMPRESSION_MULTIPLIER = 5
        total_size = 0
        for meta in self._metadata:
            total_size += meta.serialized_size
        return total_size * PARQUET_DECOMPRESSION_MULTIPLIER

    def get_read_tasks(self, parallelism: int) -> List[ReadTask]:
        # NOTE: We override the base class FileBasedDatasource.get_read_tasks()
        # method in order to leverage pyarrow's ParquetDataset abstraction,
        # which simplifies partitioning logic. We still use
        # FileBasedDatasource's write side (do_write), however.
        read_tasks = []
        # Split pieces and their prefetched metadata into `parallelism` batches,
        # in lockstep, skipping any empty batch.
        for pieces, metadata in zip(
            np.array_split(self._pq_ds.pieces, parallelism),
            np.array_split(self._metadata, parallelism),
        ):
            if len(pieces) <= 0:
                continue
            serialized_pieces = [_SerializedPiece(p) for p in pieces]
            input_files = [p.path for p in pieces]
            meta = self._meta_provider(
                input_files,
                self._inferred_schema,
                pieces=pieces,
                prefetched_metadata=metadata,
            )
            block_udf, reader_args, columns, schema = (
                self._block_udf,
                self._reader_args,
                self._columns,
                self._schema,
            )
            # The batch is bound as a default argument (p=serialized_pieces)
            # so each task captures its own batch, not the loop variable.
            read_tasks.append(
                ReadTask(
                    lambda p=serialized_pieces: _read_pieces(
                        block_udf,
                        reader_args,
                        columns,
                        schema,
                        p,
                    ),
                    meta,
                )
            )

        return read_tasks
def _read_pieces(
    block_udf, reader_args, columns, schema, serialized_pieces: List[_SerializedPiece]
) -> Iterator["pyarrow.Table"]:
    """Read the given serialized parquet fragments, yielding buffered blocks."""
    # Deserialize after loading the filesystem class.
    pieces: List[
        "pyarrow._dataset.ParquetFileFragment"
    ] = _deserialize_pieces_with_retry(serialized_pieces)

    # Ensure that we're reading at least one dataset fragment.
    assert len(pieces) > 0

    import pyarrow as pa
    from pyarrow.dataset import _get_partition_keys

    ctx = DatasetContext.get_current()
    output_buffer = BlockOutputBuffer(
        block_udf=block_udf,
        target_max_block_size=ctx.target_max_block_size,
    )

    logger.debug(f"Reading {len(pieces)} parquet pieces")
    use_threads = reader_args.pop("use_threads", False)
    for piece in pieces:
        part = _get_partition_keys(piece.partition_expression)
        batches = piece.to_batches(
            use_threads=use_threads,
            columns=columns,
            schema=schema,
            batch_size=PARQUET_READER_ROW_BATCH_SIZE,
            **reader_args,
        )
        for batch in batches:
            table = pa.Table.from_batches([batch], schema=schema)
            # Materialize partition-key values as constant columns so they
            # appear in the output rows.
            if part:
                for col, value in part.items():
                    table = table.set_column(
                        table.schema.get_field_index(col),
                        col,
                        pa.array([value] * len(table)),
                    )
            # If the table is empty, drop it.
            if table.num_rows > 0:
                output_buffer.add_block(table)
                if output_buffer.has_next():
                    yield output_buffer.next()

    # Flush whatever remains in the buffer after the last fragment.
    output_buffer.finalize()
    if output_buffer.has_next():
        yield output_buffer.next()
def _fetch_metadata_remotely(
    pieces: List["pyarrow._dataset.ParquetFileFragment"],
) -> List[ObjectRef["pyarrow.parquet.FileMetaData"]]:
    """Fetch parquet metadata for ``pieces`` in parallel via remote tasks.

    The fragments are split into at most 100 batches (roughly
    PIECES_PER_META_FETCH fragments each); one remote task is launched per
    non-empty batch and the per-batch results are concatenated in order.
    """
    remote_fetch_metadata = cached_remote_fn(_fetch_metadata_serialization_wrapper)
    metas = []
    # Guard against a zero section count: for fewer than PIECES_PER_META_FETCH
    # fragments the floor division yields 0, and np.array_split() raises
    # ValueError when asked for 0 sections. Always use at least one batch.
    parallelism = max(1, min(len(pieces) // PIECES_PER_META_FETCH, 100))
    meta_fetch_bar = ProgressBar("Metadata Fetch Progress", total=parallelism)
    for pcs in np.array_split(pieces, parallelism):
        if len(pcs) == 0:
            continue
        metas.append(remote_fetch_metadata.remote([_SerializedPiece(p) for p in pcs]))
    metas = meta_fetch_bar.fetch_until_complete(metas)
    return list(itertools.chain.from_iterable(metas))
def _fetch_metadata_serialization_wrapper(
    pieces: List["_SerializedPiece"],
) -> List["pyarrow.parquet.FileMetaData"]:
    """Deserialize pickled parquet fragments, then collect their metadata."""
    # The name is rebound: serialized wrappers in, live fragments out.
    pieces: List[
        "pyarrow._dataset.ParquetFileFragment"
    ] = _deserialize_pieces_with_retry(pieces)
    return _fetch_metadata(pieces)
def _fetch_metadata(
pieces: List["pyarrow.dataset.ParquetFileFragment"],
) -> List["pyarrow.parquet.FileMetaData"]:
piece_metadata = []
for p in pieces:
try:
piece_metadata.append(p.metadata)
except AttributeError:
break
return piece_metadata
| [
"noreply@github.com"
] | timgates42.noreply@github.com |
3bbbe9362f8ac92757302164942546a4e7c30a17 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/4/kkm.py | 085664d06f9dfc060451e08261da39402b8935b3 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Only act when the argument tokens are wrapped in bare double quotes:
    # the first token and the last token are each exactly the '"' character.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Drop the surrounding quote tokens and print the payload tokens
            # joined by single spaces.
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # Just the two quote tokens: print an empty line (Python 2).
            print
def main(fileName):
    # Each line of the input file must start with the keyword 'kKM';
    # everything after the keyword is handed to printFunction. The first
    # malformed line prints 'ERROR' and aborts processing (Python 2 file).
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'kKM':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return

if __name__ == '__main__':
    main(sys.argv[1])
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
719faa299021b6df1bc12e54e26cdac10f9dd376 | 5b9b2ec5fb3142609882a3320c6e64c6b912395c | /GfG/Mathematical Problems/fibGoldenRatio.py | 0e52e95781f08c6731859bed0821762acff507e2 | [] | no_license | anildhaker/DailyCodingChallenge | 459ba7ba968f4394fb633d6ba8b749c1e4cb7fb0 | f1cfc52f156436dc7c0a6c43fa939cefac5cee36 | refs/heads/master | 2020-04-20T18:58:33.133225 | 2020-01-11T14:50:52 | 2020-01-11T14:50:52 | 169,036,874 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | # Nth fib number using Golden ratio.
# nth fib = (n-1)th fib * golden ratio
# round up each stage to get the correct result.
# Golden-ratio approximation of the Fibonacci sequence: each term past the
# seed table is the previous term times Phi, rounded to the nearest integer.
Phi = 1.6180339
# Seed values fib(0)..fib(5); beyond these the multiplicative recurrence runs.
f = [0, 1, 1, 2, 3, 5]


def fib(n):
    """Return the n-th Fibonacci number (0-indexed, fib(0) == 0).

    Values up to fib(5) come straight from the seed table; larger terms are
    built by repeatedly multiplying by the golden ratio and rounding. Phi has
    limited precision, so very large n will eventually drift from the exact
    sequence.
    """
    if n < 6:
        return f[n]
    # Fixed: a leftover debug print(t) used to fire on every iteration.
    term = f[5]
    for _ in range(5, n):
        term = round(term * Phi)
    return term


n = 9
print(n, "th Fibonacci Number =", fib(n))
"anildhaker777@gmail.com"
] | anildhaker777@gmail.com |
80d5cbec5df9333fbfb7da16a4a745cc46197a3c | 42c815151def498244ad0f6c01bc217eab955b8a | /tools/collect_data_stat.py | 2aa88b23aceb155e010f72497e8c34e297b9f640 | [
"Apache-2.0"
] | permissive | openvinotoolkit/mmaction2 | 5636577a420eafffeb2338c00a7a6e9f3bf8248a | 496cec37a4bae9b3d10c9d8fa46b244e4310b2fe | refs/heads/ote | 2023-07-16T13:22:55.477849 | 2021-09-01T20:42:59 | 2021-09-01T20:42:59 | 300,213,346 | 5 | 4 | NOASSERTION | 2021-09-01T20:43:00 | 2020-10-01T08:59:04 | Python | UTF-8 | Python | false | false | 4,497 | py | # Copyright (C) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
import sys
import argparse
import numpy as np
import mmcv
from mmaction.datasets import build_dataloader, build_dataset
from mmaction.utils import ExtendedDictAction
from mmaction.core.utils import propagate_root_dir
from mmcv.runner import set_random_seed
def update_config(cfg, args):
    """Apply CLI overrides to *cfg* and force test-friendly settings.

    Enables test mode and rewrites the first 'Normalize' step of the test
    pipeline to an identity transform (zero mean, unit std, no BGR swap).
    Returns the same cfg object, mutated in place.
    """
    # Optional override of the dataloader worker count.
    if args.num_workers is not None and args.num_workers > 0:
        cfg.data.workers_per_gpu = args.num_workers

    cfg.data.test.test_mode = True

    # Locate the first Normalize step (IndexError if the pipeline has none,
    # matching the original contract) and neutralise it.
    hits = [idx for idx, step in enumerate(cfg.data.test.pipeline)
            if step['type'] == 'Normalize']
    normalize_step = cfg.data.test.pipeline[hits[0]]
    normalize_step['mean'] = [0.0, 0.0, 0.0]
    normalize_step['std'] = [1.0, 1.0, 1.0]
    normalize_step['to_bgr'] = False

    return cfg
def merge_configs(cfg1, cfg2):
    """Merge *cfg2* into a copy of *cfg1*.

    Truthy values from cfg2 overwrite matching keys in cfg1; falsy values
    (None, 0, '', ...) are ignored. Either argument may be None. The inputs
    are never mutated.
    """
    merged = {} if cfg1 is None else cfg1.copy()
    source = {} if cfg2 is None else cfg2
    merged.update({key: value for key, value in source.items() if value})
    return merged
def collect_stat(data_loader):
    """Gather per-sample mean/std statistics over the whole data loader.

    Returns two stacked numpy arrays whose rows correspond to samples.
    Assumes data['imgs'] squeezes to a 5-D tensor whose last three axes are
    reduced away (leaving per-sample, per-channel values) -- TODO confirm
    the exact layout against the dataset pipeline.
    """
    mean_data, std_data = [], []

    progress_bar = mmcv.ProgressBar(len(data_loader.dataset))
    for data in data_loader:
        input_data = data['imgs'].detach().squeeze().cpu().numpy()
        # Reduce over the trailing three axes, keeping the leading two.
        mean_data.append(np.mean(input_data, axis=(2, 3, 4)))
        std_data.append(np.std(input_data, axis=(2, 3, 4)))

        # Advance the bar once per sample in this batch.
        batch_size = len(input_data)
        for _ in range(batch_size):
            progress_bar.update()

    mean_data = np.concatenate(mean_data, axis=0)
    std_data = np.concatenate(std_data, axis=0)

    return mean_data, std_data
def filter_stat(mean_data, std_data, min_value=1.0):
    """Keep only rows whose mean AND std components all exceed *min_value*.

    Both arrays are filtered with the same row mask so they stay aligned.
    """
    keep = np.logical_and(
        (mean_data > min_value).all(axis=1),
        (std_data > min_value).all(axis=1),
    )
    return mean_data[keep], std_data[keep]
def dump_stat(mean_data, std_data, out_filepath):
    """Write paired statistics to *out_filepath*, one sample per line.

    Line format: 'm1,...,mC s1,...,sC' -- the comma-joined mean components,
    a space, then the comma-joined std components. Both arrays must have the
    same shape.
    """
    assert mean_data.shape == std_data.shape

    lines = []
    for mean_row, std_row in zip(mean_data, std_data):
        mean_part = ','.join(str(component) for component in mean_row)
        std_part = ','.join(str(component) for component in std_row)
        lines.append(mean_part + ' ' + std_part + '\n')

    with open(out_filepath, 'w') as output_stream:
        output_stream.writelines(lines)
def parse_args():
    """Define and parse the command-line interface of this tool."""
    parser = argparse.ArgumentParser(description='Test model deployed to ONNX or OpenVINO')
    # Positional: config file and output path for the collected statistics.
    parser.add_argument('config', help='path to configuration file')
    parser.add_argument('out', help='path to save stat')
    parser.add_argument('--data_dir', type=str,
                        help='the dir with dataset')
    parser.add_argument('--num_workers', type=int,
                        help='number of CPU workers per GPU')
    # key=value overrides applied on top of the config file.
    parser.add_argument('--update_config', nargs='+', action=ExtendedDictAction,
                        help='Update configuration file by parameters specified here.')

    args = parser.parse_args()

    return args
def main(args):
    """Run the end-to-end statistics collection pipeline."""
    # load config
    cfg = mmcv.Config.fromfile(args.config)
    if args.update_config is not None:
        cfg.merge_from_dict(args.update_config)
    cfg = update_config(cfg, args)
    cfg = propagate_root_dir(cfg, args.data_dir)

    if cfg.get('seed'):
        print(f'Set random seed to {cfg.seed}')
        set_random_seed(cfg.seed)

    # build the dataset
    dataset = build_dataset(cfg.data, 'test', dict(test_mode=True))
    print(f'Test datasets:\n{str(dataset)}')

    # build the dataloader (single process group, deterministic order)
    data_loader = build_dataloader(
        dataset,
        videos_per_gpu=20,
        workers_per_gpu=cfg.data.workers_per_gpu,
        dist=False,
        shuffle=False
    )

    # collect per-sample statistics, drop degenerate rows, write them out
    mean_data, std_data = collect_stat(data_loader)
    mean_data, std_data = filter_stat(mean_data, std_data, min_value=1.0)
    dump_stat(mean_data, std_data, args.out)


if __name__ == '__main__':
    args = parse_args()
    sys.exit(main(args) or 0)
"evgeny.izutov@intel.com"
] | evgeny.izutov@intel.com |
298bf01200ac8b294e118cbe01d81b9d7e99da96 | 35e41dab4a3e8eebe17872f5414cf3efb86be54c | /math/0x03-probability/exponential.py | cdca021ebc6f68ec512c14e54e7dd7aecbf85047 | [] | no_license | yasheymateen/holbertonschool-machine_learning | 52eaf217e924404e8833e75e8b48e80807d784dc | 1329ba88bf9ae50693188e1c85383d9b76c2b1f4 | refs/heads/master | 2022-12-24T13:51:02.743526 | 2020-10-08T06:59:04 | 2020-10-08T06:59:04 | 255,277,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,017 | py | #!/usr/bin/env python3
""" Exponential distribution class """
class Exponential:
    """Models an exponential distribution, optionally estimated from data."""

    def __init__(self, data=None, lambtha=1.):
        """Set the rate parameter, estimating it from *data* when given.

        Raises:
            TypeError: if data is given but is not a list.
            ValueError: if lambtha is not positive, or data has fewer than
                two values.
        """
        if data is not None:
            if type(data) is not list:
                raise TypeError("data must be a list")
            if len(data) < 2:
                raise ValueError("data must contain multiple values")
            # MLE for the rate: number of observations over their sum.
            self.lambtha = len(data) / sum(data)
        else:
            if lambtha <= 0:
                raise ValueError("lambtha must be a positive value")
            self.lambtha = float(lambtha)

    def pdf(self, x):
        """Return the PDF value for time period *x* (0 for negative x)."""
        if x < 0:
            return 0
        return self.lambtha * 2.7182818285 ** (-1 * self.lambtha * x)

    def cdf(self, x):
        """Return the CDF value for time period *x* (0 for negative x)."""
        if x < 0:
            return 0
        return 1 - 2.7182818285 ** (-1 * self.lambtha * x)
| [
"yasheymateen@gmail.com"
] | yasheymateen@gmail.com |
c07e24422565117f12a61cbaf45b486534d7ab97 | 88906fbe13de27413a51da917ebe46b473bec1b9 | /Part-I/Chapter 9 - Classes/user_class.py | 52ac6d4c5bd85b5435ecd806761bceed66dabb9d | [] | no_license | lonewolfcub/Python-Crash-Course | 0b127e40f5029d84ad036263fd9153f6c88c2420 | 322388dfb81f3335eeffabcdfb8f9c5a1db737a4 | refs/heads/master | 2021-01-01T16:45:50.617189 | 2017-10-27T14:23:58 | 2017-10-27T14:23:58 | 97,911,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,598 | py | """A class to model user attributes"""
class User:
    """Model of an application user account."""

    def __init__(self, first_name, last_name, username, employee_number,
                 employee_type, email, password, admin, banned):
        """Store the user's identity, credentials and status flags."""
        self.first_name = first_name
        self.last_name = last_name
        self.username = username
        self.employee_number = employee_number
        self.employee_type = employee_type
        self.email = email
        self.password = password
        self.admin = admin
        self.banned = banned
        # Sign-in counter, managed by the *_login_attempts methods below.
        self.login_attempts = 0

    def describe_user(self):
        """Print a multi-line summary of this user's profile."""
        full_name = self.first_name.title() + " " + self.last_name.title()
        summary = [
            "\nUsername: " + self.username,
            "Name: " + full_name,
            "Employee Number: " + str(self.employee_number),
            "Employee Type: " + self.employee_type.title(),
            "User Email: " + self.email,
            "Password: " + self.password,
            "User is Admin: " + str(self.admin),
            "User is Banned: " + str(self.banned),
        ]
        for line in summary:
            print(line)

    def greet_user(self):
        """Print a short personalised greeting."""
        print("\nHello " + self.first_name.title() + "!")

    def increment_login_attempts(self):
        """Increment a user's login attempts by 1"""
        self.login_attempts = self.login_attempts + 1

    def reset_login_attempts(self):
        """Reset a user's login attempts to 0"""
        self.login_attempts = 0

    def get_user_login_attempts(self):
        """Print a user's number of login attempts"""
        print(self.username + " has attempted to log in " +
              str(self.login_attempts) + " times.")
"lonewolfcub020@gmail.com"
] | lonewolfcub020@gmail.com |
2dcf6c43da0f2e8dca468ba4306efb52659010fa | 18fa8cd12c0082df6887ab00381d7134ec41ec9e | /actors.py | 3d1ab0d378a18ff6e60f5f1ec3bb6545cd4a2986 | [] | no_license | reddit-pygame/Rotation-Thruster-Movement | 9afb7e87b632f3956160946ad9f82e26a9538b7e | f6ed16061697f6d4349cd34c146f95933abcb03b | refs/heads/master | 2021-01-19T08:05:33.725764 | 2015-08-01T11:10:55 | 2015-08-01T11:10:55 | 39,951,568 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,205 | py | """
This module contains the Player class for the user controlled character.
"""
import pygame as pg
import prepare
class Player(pg.sprite.Sprite):
    """
    This class represents our user controlled character.
    """
    def __init__(self, pos, image, speed=7, *groups):
        # speed: movement step applied per update call (pixels) --
        # presumably per frame; the image is rotated 90 degrees and scaled
        # by the global SCALE_FACTOR.
        super(Player, self).__init__(*groups)
        self.speed = speed
        self.image = pg.transform.rotozoom(image, 90, prepare.SCALE_FACTOR)
        self.rect = self.image.get_rect(center=pos)

    def update(self, keys, bounding):
        """
        Updates the players position based on currently held keys.
        """
        move = self.check_keys(keys)
        self.rect.move_ip(*move)
        # Keep the sprite fully inside the bounding rect.
        self.rect.clamp_ip(bounding)

    def check_keys(self, keys):
        """
        Find the players movement vector from key presses.
        """
        move = [0, 0]
        for key in prepare.DIRECT_DICT:
            if keys[key]:
                # Sum unit direction components so opposite keys cancel out.
                for i in (0, 1):
                    move[i] += prepare.DIRECT_DICT[key][i]*self.speed
        return move

    def draw(self, surface):
        """
        Basic draw function. (not used if drawing via groups)
        """
        surface.blit(self.image, self.rect)
| [
"3ror@live.com"
] | 3ror@live.com |
9b20d644629767b38d8dcea784f6bfdbb209c97d | ac4b9385b7ad2063ea51237fbd8d1b74baffd016 | /.history/utils/ocr/handle_image_20210209171341.py | a2f29889d4bc94d4226866258b46adb684002b71 | [] | no_license | preethanpa/ssoemprep | 76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f | ce37127845253c768d01aeae85e5d0d1ade64516 | refs/heads/main | 2023-03-09T00:15:55.130818 | 2021-02-20T06:54:58 | 2021-02-20T06:54:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,038 | py | import os
import cv2
import re
import numpy as np
from PIL import Image
import pytesseract
from pytesseract import Output
from fpdf import FPDF
'''
IMAGE HANDLING METHODS
'''
# get grayscale image (BGR -> single channel)
def get_grayscale(image):
    return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# blur removal: 5x5 median filter
def remove_blur(image):
    return cv2.medianBlur(image,5)

# noise removal: non-local-means denoising for colour images
def remove_noise(image):
    return cv2.fastNlMeansDenoisingColored(image, None, 10, 10, 7, 15)

# thresholding: binary threshold with Otsu's automatic level selection
def thresholding(image):
    return cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)[1]

# dilation with a 5x5 all-ones kernel, one pass
def dilate(image):
    kernel = np.ones((5,5),np.uint8)
    return cv2.dilate(image, kernel, iterations = 1)

# erosion with a 5x5 all-ones kernel, one pass
def erode(image):
    kernel = np.ones((5,5),np.uint8)
    return cv2.erode(image, kernel, iterations = 1)
def extract_pdf_from_image(fileName='', pdf_path='', action='', psm=3):
    '''
    Run tesseract OCR on an image and save the result as a searchable PDF.

    fileName: path of the input image
    pdf_path: directory the PDF is written to (created if missing)
    action: preprocessing selector -- 1 = denoise, 2 = grayscale, 3 = deblur.
            NOTE(review): any other value leaves img1 as None, which is then
            handed to pytesseract -- confirm callers always pass 1, 2 or 3.
    psm: tesseract page segmentation mode

    Returns the path of the written PDF file.
    '''
    print(f'FileName is {fileName}')
    #custom_config = r'-c tessedit_char_whitelist=123456789MALEPQRETHANabcdefghijklmnopqrstuvwxyz --psm 6'
    #custom_config = r'-l eng --psm 11'
    # NOTE(review): custom_config is currently unused by the active OCR call.
    custom_config = r'-l eng --psm ' + str(psm)
    pdfdir = pdf_path
    if not os.path.exists(pdfdir):
        os.makedirs(pdfdir)
    # pdfFileName = os.path.basename(fileName).split('.')[0] + '.pdf'
    pdfFileName = os.path.basename(fileName).split('.')[0]+ '.pdf'
    pdfFilePath = pdfdir + '/' + pdfFileName
    print(f'PDF File Path {pdfFilePath}')
    #d = pytesseract.image_to_data(img, output_type=Output.DICT)
    img = cv2.imread(fileName)
    img1 = None
    if (action == 1):
        img1 = remove_noise(img)
    if (action == 2):
        img1 = get_grayscale(img)
        #img1 = erode(img)
    if (action == 3):
        img1 = remove_blur(img)
    #text = pytesseract.image_to_string(img1, config=custom_config,lang='eng')
    # image_to_pdf_or_hocr returns the PDF content as bytes.
    text = pytesseract.image_to_pdf_or_hocr(img1, extension='pdf')
    with open(pdfFilePath, mode = 'w+b') as f:
        f.write(text)
    return pdfFilePath
def convert_text_to_pdf(text='', pdf_path='', filename=''):
    '''
    Convert text content to a simple single-column PDF.

    text: content written to a temp .txt file first (the file is opened in
          binary mode, so callers are expected to pass bytes -- TODO confirm)
    pdf_path: output directory, created if missing
    filename: base name (without extension) for both the temp and PDF files
    '''
    tempdir = "/tmp"
    pdfdir = pdf_path
    textFileName = tempdir + '/' + filename + ".txt"
    pdfFileName = pdfdir + '/' + filename + ".pdf"
    if not os.path.exists(tempdir):
        os.makedirs(tempdir)
    if not os.path.exists(pdfdir):
        os.makedirs(pdfdir)
    # save FPDF() class into a
    # variable pdf
    pdf = FPDF()
    # Add a page
    pdf.add_page()
    # set style and size of font
    # that you want in the pdf
    pdf.set_font("Arial", size = 15)
    # Round-trip the text through a temp file, then render it line by line.
    with open(textFileName, mode = 'w+b') as f:
        f.write(text)
    line = 1
    # NOTE(review): f is never closed here; consider a with-block.
    f = open(textFileName, "r")
    for x in f:
        # Strip em-dashes and curly quotes, presumably because the core
        # latin-1 font cannot encode them.
        x1 = re.sub(u"(\u2014|\u2018|\u2019|\u201c|\u201d)", "", x)
        pdf.cell(100, 10, txt=x1, ln=line, align='L')
        line=line+1
    #save the pdf with name .pdf
    pdf.output(pdfFileName,'F')
def mark_region(image_path):
    """Detect candidate text regions in the image at *image_path*.

    Returns ``(annotated_image, line_items_coordinates)`` where the image has
    a rectangle drawn around every accepted region and the list holds the
    ``[(x1, y1), (x2, y2)]`` corner pairs of those rectangles.
    ``annotated_image`` is None when no region passed the filters.
    """
    print(f'image_path {image_path}')
    image = None
    im = cv2.imread(image_path)

    # Binarise: grayscale -> Gaussian blur -> adaptive inverted threshold.
    gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (9, 9), 0)
    thresh = cv2.adaptiveThreshold(
        blur, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 11, 30)

    # Dilate to combine adjacent text contours into larger regions.
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (9, 9))
    dilate = cv2.dilate(thresh, kernel, iterations=4)

    # Find contours, highlight text areas, and extract ROIs. The coordinate
    # limits below are hard-coded pixel positions -- presumably tuned to a
    # specific scan resolution; confirm before reusing on other inputs.
    cnts = cv2.findContours(dilate, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts = cnts[0] if len(cnts) == 2 else cnts[1]

    line_items_coordinates = []
    for c in cnts:
        area = cv2.contourArea(c)
        x, y, w, h = cv2.boundingRect(c)

        if y >= 600 and x <= 1000:
            if area > 10000:
                image = cv2.rectangle(im, (x, y), (2200, y + h),
                                      color=(255, 0, 255), thickness=3)
                line_items_coordinates.append([(x, y), (2200, y + h)])

        if y >= 2400 and x <= 2000:
            image = cv2.rectangle(im, (x, y), (2200, y + h),
                                  color=(255, 0, 255), thickness=3)
            line_items_coordinates.append([(x, y), (2200, y + h)])

    # Fixed: a stray bare `print` expression (a no-op leftover) was removed.
    return (image, line_items_coordinates)
"{abhi@third-ray.com}"
] | {abhi@third-ray.com} |
3e7e1847a9be763898e6e7638a31882588c3e027 | a933712aed95b8d05d2a81d2d85cd67f3143b328 | /code/libraries/testPTable.py | ad68b66cd19ade5ebeef3881ba946244a18e4e1d | [] | no_license | stefanoborini/quantum-chemistry | a9b11bc6ffffe44c45d046518c1a3f31c470af44 | 5f92b013018e65bea5354be95e2538b5434d1fe7 | refs/heads/master | 2020-03-28T09:44:28.401512 | 2018-09-23T22:39:41 | 2018-09-23T22:39:41 | 148,056,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,356 | py | import os
import sys
import unittest
import tempfile
from ptable import PTable,Element
"""
This is a test suite for the PTable class.
"""
class PTableTestCase(unittest.TestCase):
    """Shared fixture: reference element data plus a fresh PTable instance.

    Each tuple is (atomic number, symbol, covalent radius, bohr radius,
    van der Waals radius, max bond order, red, green, blue, atomic mass) --
    the attribute order the tests below compare against.
    """
    def setUp(self):
        self.elements = [
            (0,'X',0.000,0.000,0.000,0,0.07,0.50,0.70,0.000),
            (1,'H',0.230,0.330,1.200,1,1.00,1.00,1.00,1.008),
            (2,'He',0.930,0.700,1.400,0,0.85,1.00,1.00,4.003),
            (3,'Li',0.680,1.230,1.820,1,0.80,0.50,1.00,6.941),
            (4,'Be',0.350,0.900,1.700,2,0.76,1.00,0.00,9.012),
            (5,'B',0.830,0.820,2.080,3,1.00,0.71,0.71,10.812),
            (6,'C',0.680,0.770,1.950,4,0.50,0.50,0.50,12.011),
            (7,'N',0.680,0.700,1.850,4,0.05,0.05,1.00,14.007),
            (8,'O',0.680,0.660,1.700,2,1.00,0.05,0.05,15.999),
            (9,'F',0.640,0.611,1.730,1,0.70,1.00,1.00,18.998),
            (10,'Ne',1.120,0.700,1.540,0,0.70,0.89,0.96,20.180),
            (11,'Na',0.970,1.540,2.270,1,0.67,0.36,0.95,22.990),
            (12,'Mg',1.100,1.360,1.730,2,0.54,1.00,0.00,24.305),
            (13,'Al',1.350,1.180,2.050,6,0.75,0.65,0.65,26.982),
            (14,'Si',1.200,0.937,2.100,6,0.50,0.60,0.60,28.086),
            (15,'P',0.750,0.890,2.080,5,1.00,0.50,0.00,30.974),
            (16,'S',1.020,1.040,2.000,6,1.00,1.00,0.19,32.067),
            (17,'Cl',0.990,0.997,1.970,1,0.12,0.94,0.12,35.453),
            (18,'Ar',1.570,1.740,1.880,0,0.50,0.82,0.89,39.948),
            (19,'K',1.330,2.030,2.750,1,0.56,0.25,0.83,39.098),
            (20,'Ca',0.990,1.740,1.973,2,0.24,1.00,0.00,40.078),
            (21,'Sc',1.440,1.440,1.700,6,0.90,0.90,0.90,44.956),
            (22,'Ti',1.470,1.320,1.700,6,0.75,0.76,0.78,47.867),
            (23,'V',1.330,1.220,1.700,6,0.65,0.65,0.67,50.942),
            (24,'Cr',1.350,1.180,1.700,6,0.54,0.60,0.78,51.996),
            (25,'Mn',1.350,1.170,1.700,8,0.61,0.48,0.78,54.938),
            (26,'Fe',1.340,1.170,1.700,6,0.50,0.48,0.78,55.845),
            (27,'Co',1.330,1.160,1.700,6,0.44,0.48,0.78,58.933),
            (28,'Ni',1.500,1.150,1.630,6,0.36,0.48,0.76,58.693),
            (29,'Cu',1.520,1.170,1.400,6,1.00,0.48,0.38,63.546),
            (30,'Zn',1.450,1.250,1.390,6,0.49,0.50,0.69,65.39),
            (31,'Ga',1.220,1.260,1.870,3,0.76,0.56,0.56,69.723),
            (32,'Ge',1.170,1.188,1.700,4,0.40,0.56,0.56,72.61),
            (33,'As',1.210,1.200,1.850,3,0.74,0.50,0.89,74.922),
            (34,'Se',1.220,1.170,1.900,2,1.00,0.63,0.00,78.96),
            (35,'Br',1.210,1.167,2.100,1,0.65,0.16,0.16,79.904),
            (36,'Kr',1.910,1.910,2.020,0,0.36,0.72,0.82,83.80),
            (37,'Rb',1.470,2.160,1.700,1,0.44,0.18,0.69,85.468),
            (38,'Sr',1.120,1.910,1.700,2,0.00,1.00,0.00,87.62),
            (39,'Y',1.780,1.620,1.700,6,0.58,1.00,1.00,88.906),
            (40,'Zr',1.560,1.450,1.700,6,0.58,0.88,0.88,91.224),
            (41,'Nb',1.480,1.340,1.700,6,0.45,0.76,0.79,92.906),
            (42,'Mo',1.470,1.300,1.700,6,0.33,0.71,0.71,95.94),
            (43,'Tc',1.350,1.270,1.700,6,0.23,0.62,0.62,98.0),
            (44,'Ru',1.400,1.250,1.700,6,0.14,0.56,0.56,101.07),
            (45,'Rh',1.450,1.250,1.700,6,0.04,0.49,0.55,102.906),
            (46,'Pd',1.500,1.280,1.630,6,0.00,0.41,0.52,106.42),
            (47,'Ag',1.590,1.340,1.720,6,0.88,0.88,1.00,107.868),
            (48,'Cd',1.690,1.480,1.580,6,1.00,0.85,0.56,112.412),
            (49,'In',1.630,1.440,1.930,3,0.65,0.46,0.45,114.818),
            (50,'Sn',1.460,1.385,2.170,4,0.40,0.50,0.50,118.711),
            (51,'Sb',1.460,1.400,2.200,3,0.62,0.39,0.71,121.760),
            (52,'Te',1.470,1.378,2.060,2,0.83,0.48,0.00,127.60),
            (53,'I',1.400,1.387,2.150,1,0.58,0.00,0.58,126.904),
            (54,'Xe',1.980,1.980,2.160,0,0.26,0.62,0.69,131.29),
            (55,'Cs',1.670,2.350,1.700,1,0.34,0.09,0.56,132.905),
            (56,'Ba',1.340,1.980,1.700,2,0.00,0.79,0.00,137.328),
            (57,'La',1.870,1.690,1.700,12,0.44,0.83,1.00,138.906),
            (58,'Ce',1.830,1.830,1.700,6,1.00,1.00,0.78,140.116),
            (59,'Pr',1.820,1.820,1.700,6,0.85,1.00,0.78,140.908),
            (60,'Nd',1.810,1.810,1.700,6,0.78,1.00,0.78,144.24),
            (61,'Pm',1.800,1.800,1.700,6,0.64,1.00,0.78,145.0),
            (62,'Sm',1.800,1.800,1.700,6,0.56,1.00,0.78,150.36),
            (63,'Eu',1.990,1.990,1.700,6,0.38,1.00,0.78,151.964),
            (64,'Gd',1.790,1.790,1.700,6,0.27,1.00,0.78,157.25),
            (65,'Tb',1.760,1.760,1.700,6,0.19,1.00,0.78,158.925),
            (66,'Dy',1.750,1.750,1.700,6,0.12,1.00,0.78,162.50),
            (67,'Ho',1.740,1.740,1.700,6,0.00,1.00,0.61,164.930),
            (68,'Er',1.730,1.730,1.700,6,0.00,0.90,0.46,167.26),
            (69,'Tm',1.720,1.720,1.700,6,0.00,0.83,0.32,168.934),
            (70,'Yb',1.940,1.940,1.700,6,0.00,0.75,0.22,173.04),
            (71,'Lu',1.720,1.720,1.700,6,0.00,0.67,0.14,174.967),
            (72,'Hf',1.570,1.440,1.700,6,0.30,0.76,1.00,178.49),
            (73,'Ta',1.430,1.340,1.700,6,0.30,0.65,1.00,180.948),
            (74,'W',1.370,1.300,1.700,6,0.13,0.58,0.84,183.84),
            (75,'Re',1.350,1.280,1.700,6,0.15,0.49,0.67,186.207),
            (76,'Os',1.370,1.260,1.700,6,0.15,0.40,0.59,190.23),
            (77,'Ir',1.320,1.270,1.700,6,0.09,0.33,0.53,192.217),
            (78,'Pt',1.500,1.300,1.720,6,0.96,0.93,0.82,195.078),
            (79,'Au',1.500,1.340,1.660,6,0.80,0.82,0.12,196.967),
            (80,'Hg',1.700,1.490,1.550,6,0.71,0.71,0.76,200.59),
            (81,'Tl',1.550,1.480,1.960,3,0.65,0.33,0.30,204.383),
            (82,'Pb',1.540,1.480,2.020,4,0.34,0.35,0.38,207.2),
            (83,'Bi',1.540,1.450,1.700,3,0.62,0.31,0.71,208.980),
            (84,'Po',1.680,1.460,1.700,2,0.67,0.36,0.00,209.0),
            (85,'At',1.700,1.450,1.700,1,0.46,0.31,0.27,210.0),
            (86,'Rn',2.400,2.400,1.700,0,0.26,0.51,0.59,222.0),
            (87,'Fr',2.000,2.000,1.700,1,0.26,0.00,0.40,223.0),
            (88,'Ra',1.900,1.900,1.700,2,0.00,0.49,0.00,226.0),
            (89,'Ac',1.880,1.880,1.700,6,0.44,0.67,0.98,227.0),
            (90,'Th',1.790,1.790,1.700,6,0.00,0.73,1.00,232.038),
            (91,'Pa',1.610,1.610,1.700,6,0.00,0.63,1.00,231.036),
            (92,'U',1.580,1.580,1.860,6,0.00,0.56,1.00,238.029),
            (93,'Np',1.550,1.550,1.700,6,0.00,0.50,1.00,237.0),
            (94,'Pu',1.530,1.530,1.700,6,0.00,0.42,1.00,244.0),
            (95,'Am',1.510,1.070,1.700,6,0.33,0.36,0.95,243.0),
            (96,'Cm',1.500,0.000,1.700,6,0.47,0.36,0.89,247.0),
            (97,'Bk',1.500,0.000,1.700,6,0.54,0.31,0.89,247.0),
            (98,'Cf',1.500,0.000,1.700,6,0.63,0.21,0.83,251.0),
            (99,'Es',1.500,0.000,1.700,6,0.70,0.12,0.83,252.0),
            (100,'Fm',1.500,0.000,1.700,6,0.70,0.12,0.73,257.0),
            (101,'Md',1.500,0.000,1.700,6,0.70,0.05,0.65,258.0),
            (102,'No',1.500,0.000,1.700,6,0.74,0.05,0.53,259.0),
            (103,'Lr',1.500,0.000,1.700,6,0.78,0.00,0.40,262.0),
            (104,'Rf',1.600,0.000,1.700,6,0.80,0.00,0.35,261.0),
            (105,'Db',1.600,0.000,1.700,6,0.82,0.00,0.31,262.0),
            (106,'Sg',1.600,0.000,1.700,6,0.85,0.00,0.27,263.0),
            (107,'Bh',1.600,0.000,1.700,6,0.88,0.00,0.22,264.0),
            (108,'Hs',1.600,0.000,1.700,6,0.90,0.00,0.18,265.0),
            (109,'Mt',1.600,0.000,1.700,6,0.92,0.00,0.15,268.0),
            (110,'Uun',1.600,0.000,1.700,6,0.95,0.00,0.11,269.0)]
        # A fresh PTable per test keeps the cases independent.
        self.p = PTable()
class PTableTest1(PTableTestCase):
    "Base behaviour tests"

    def testGetElementBySymbol(self):
        # Symbol lookup must return every reference attribute unchanged.
        for atnum,symbol,rcov,rbo,rvdw,maxbnd,red,green,blue,mass in self.elements:
            e = self.p.getElementBySymbol(symbol)
            self.assertEqual(e.atomicNumber,atnum)
            self.assertEqual(e.symbol,symbol)
            self.assertEqual(e.covalentRadius,rcov)
            self.assertEqual(e.bohrRadius,rbo)
            self.assertEqual(e.vanDerWaalsRadius,rvdw)
            self.assertEqual(e.maxBondOrder,maxbnd)
            self.assertEqual(e.color,(red,green,blue))
            self.assertEqual(e.atomicMass,mass)

    def testGetElementByAtomicNumber(self):
        # Atomic-number lookup must return every reference attribute unchanged.
        for atnum,symbol,rcov,rbo,rvdw,maxbnd,red,green,blue,mass in self.elements:
            e = self.p.getElementByAtomicNumber(atnum)
            self.assertEqual(e.atomicNumber,atnum)
            self.assertEqual(e.symbol,symbol)
            self.assertEqual(e.covalentRadius,rcov)
            self.assertEqual(e.bohrRadius,rbo)
            self.assertEqual(e.vanDerWaalsRadius,rvdw)
            self.assertEqual(e.maxBondOrder,maxbnd)
            self.assertEqual(e.color,(red,green,blue))
            self.assertEqual(e.atomicMass,mass)

    def testReversibility1(self):
        # number -> symbol -> number must round-trip.
        for atnum,symbol,rcov,rbo,rvdw,maxbnd,red,green,blue,mass in self.elements:
            e1 = self.p.getElementByAtomicNumber(atnum)
            e2 = self.p.getElementBySymbol(e1.symbol)
            self.assertEqual(e1.atomicNumber,e2.atomicNumber)

    def testReversibility2(self):
        # symbol -> number -> symbol must round-trip.
        for atnum,symbol,rcov,rbo,rvdw,maxbnd,red,green,blue,mass in self.elements:
            e1 = self.p.getElementBySymbol(symbol)
            e2 = self.p.getElementByAtomicNumber(e1.atomicNumber)
            self.assertEqual(e1.symbol,e2.symbol)


if __name__ == '__main__':
    unittest.main()

# vim: et ts=4 sw=4
| [
"stefano.borini@gmail.com"
] | stefano.borini@gmail.com |
fe0444560a8e563535551046d5c1226618e180c7 | d5370925ce06a5b26a76b3fde3ce6da2100c43cd | /setup.py | 1210b5f0be2366394d3232ace3c1ce2ae80a078d | [
"MIT"
] | permissive | a-tal/esi-bot | c92c5ba8aef40f6a25701c5150bcb9ecdb0aeff1 | 81418d6438957c8299927fdb267c6598ad521bbe | refs/heads/master | 2020-03-20T20:55:18.205210 | 2018-06-18T06:03:46 | 2018-06-18T06:13:10 | 137,714,477 | 0 | 0 | null | 2018-06-18T05:48:08 | 2018-06-18T05:48:07 | null | UTF-8 | Python | false | false | 943 | py | """ESI slack bot."""
import os
from setuptools import setup
from setuptools import find_packages
from setuphelpers import git_version
from setuphelpers import long_description
# Package metadata: version and long description are derived from git state
# via the setuphelpers package at build time.
setup(
    name="esi-bot",
    version=git_version(),
    description="ESI slack bot",
    long_description=long_description(),
    packages=find_packages(),
    author="Adam Talsma",
    author_email="adam@talsma.ca",
    url="https://github.com/esi/esi-bot/",
    download_url="https://github.com/esi/esi-bot/",
    install_requires=[
        "requests >= 2.18.4",
        "slackclient >= 1.2.1",
        "gevent >= 1.2.2",
    ],
    setup_requires=["setuphelpers >= 0.1.2"],
    # Installs the `esi-bot` console command pointing at esi_bot.bot:main.
    entry_points={"console_scripts": ["esi-bot = esi_bot.bot:main"]},
    classifiers=[
        "Development Status :: 4 - Beta",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 3.6",
        "License :: OSI Approved :: MIT License",
    ],
)
| [
"github@talsma.ca"
] | github@talsma.ca |
08e18455f072120dcb93754c798d761535899ac4 | 0e8e9bef32f40f5d0fd8c302293ae66732770b66 | /2015/pythonlearn/numpy/aboutSinSpeed.py | 1bbd1191245377d4199d35b7cfe85988651d4956 | [] | no_license | tuouo/selfstudy | 4a33ec06d252388816ad38aa44838e2b728178d4 | 139a0d63477298addfff5b9ea8d39fab96734f25 | refs/heads/master | 2021-01-24T18:37:31.023908 | 2018-03-01T02:55:16 | 2018-03-01T02:55:16 | 84,453,967 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time, math, numpy as np
# Benchmark three ways of computing sine over a 100k-point grid:
# element-wise math.sin, one vectorised numpy.sin call, and numpy.sin
# applied per element. time.clock() was removed in Python 3.8, so the
# monotonic high-resolution time.perf_counter() is used instead.
x = [i * 0.001 for i in range(100000)]
start = time.perf_counter()
for i, v in enumerate(x):
    x[i] = math.sin(v)
end = time.perf_counter()
print("math.sin:", end - start)

x = [i * 0.001 for i in range(100000)]
start2 = time.perf_counter()
x = np.array(x)
np.sin(x, x)  # second argument: write results in place
end2 = time.perf_counter()
print("numpy.sin:", end2 - start2)
print("math/numpy:", (end - start) / (end2 - start2))

x = [i * 0.001 for i in range(100000)]
start3 = time.perf_counter()
for i, v in enumerate(x):
    x[i] = np.sin(v)  # scalar numpy call per element (slowest variant)
end3 = time.perf_counter()
print("numpy.sin per:", end3 - start3)
print("math/per:", (end - start) / (end3 - start3))
"ltytuotuo@gmail.com"
] | ltytuotuo@gmail.com |
4178c42be2a16843929bf99b3abf9d4f92102c0b | 3d4292db087b8ab98b4751a16b50bcee90dd78bb | /freshdesk/v2/errors.py | 964241d4ecbf820b5877b3f43be210c76f3ed6c6 | [
"BSD-2-Clause"
] | permissive | sjkingo/python-freshdesk | 3a0588ba554e14705a13eb3ab8d8c65f9b1545e1 | 87ccfb3d945c5867bfa92d55aad03dc98466a2aa | refs/heads/master | 2023-01-09T08:10:20.113448 | 2022-09-16T01:29:20 | 2022-09-16T01:29:20 | 28,647,308 | 85 | 82 | BSD-2-Clause | 2023-01-07T00:16:05 | 2014-12-30T22:15:02 | Python | UTF-8 | Python | false | false | 626 | py | from requests import HTTPError
class FreshdeskError(HTTPError):
    """
    Base error class for Freshdesk API failures.
    Subclassing HTTPError to avoid breaking existing code that expects only HTTPErrors.
    """


class FreshdeskBadRequest(FreshdeskError):
    """Most 40X and 501 status codes."""


class FreshdeskUnauthorized(FreshdeskError):
    """401 Unauthorized."""


class FreshdeskAccessDenied(FreshdeskError):
    """403 Forbidden."""


class FreshdeskNotFound(FreshdeskError):
    """404 Not Found."""


class FreshdeskRateLimited(FreshdeskError):
    """429 Rate Limit Reached."""


class FreshdeskServerError(FreshdeskError):
    """50X server-side errors."""
| [
"sam@sjkwi.com.au"
] | sam@sjkwi.com.au |
d0405afd7c812af5e36619f67ceca4c53351cce2 | 7e145d1fff87cdabf7c9ae9c08637f299fbd3849 | /222. complete binary tree.py | 5ac0c8ab6f3a81cac087cc91c8a121b54628f920 | [] | no_license | dundunmao/LeetCode2019 | 2b39ef181a7f66efc9de7d459b11eb1a4a7f60a8 | 9b38a7742a819ac3795ea295e371e26bb5bfc28c | refs/heads/master | 2020-09-16T16:46:50.482697 | 2020-06-07T08:01:16 | 2020-06-07T08:01:16 | 223,833,958 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,557 | py | # 满足三点:
# A complete tree must satisfy three conditions:
# 1: the left and right subtrees are both full
# 2: the left subtree's min > the right subtree's max
# 3: the difference between the left max and the right min is at most 1
class TreeNode:
    """Binary-tree node holding a value and two child links."""

    def __init__(self, val):
        """Create a detached node carrying *val*."""
        self.val = val
        self.left = None
        self.right = None
class Solution:
    """Checks whether a binary tree is a complete binary tree."""

    def isComplete(self, root):
        """Return True when the tree rooted at *root* is complete."""
        is_complete, _, _ = self.helper(root)
        return is_complete

    def helper(self, root):
        """Recursively validate a subtree.

        Returns ``(ok, leftmost_depth, rightmost_depth)`` where the depths
        are the lengths of the leftmost and rightmost root-to-null paths of
        the subtree (0 for an empty subtree).
        """
        if root is None:
            return True, 0, 0
        ok_left, left_min, left_max = self.helper(root.left)
        ok_right, right_min, right_max = self.helper(root.right)
        if not (ok_left and ok_right):
            return False, 0, 0
        # Reject when the left side's rightmost path is shorter than the
        # right side's leftmost path, or when the left side's leftmost path
        # is more than one level deeper than the right side's rightmost path.
        if left_max < right_min or left_min > right_max + 1:
            return False, 0, 0
        return True, left_min + 1, right_max + 1
if __name__ == '__main__':
# TREE 1
# Construct the following tree
# 1
# / \
# 2 3
# / \
# 4 5
# / \
# 6 7
# \
# 8
P = TreeNode(1)
P.left = TreeNode(2)
P.left.left = TreeNode(4)
P.left.right = TreeNode(5)
# P.left.right.left = TreeNode(6)
# P.left.right.right = TreeNode(7)
# P.left.right.right.right = TreeNode(8)
P.right = TreeNode(3)
#
#
# Q = Node(26)
# Q.left = Node(10)
# Q.left.left = Node(4)
# Q.left.right = Node(6)
# Q.right = Node(3)
# # Q.right.right = Node(3)
s = Solution()
print s.isComplete(P)
| [
"dundunmao@gmail.com"
] | dundunmao@gmail.com |
886ac80011b04f26ef9f2b7d6a592fd307102e9c | 904d7d50f22447a0803da412bae719a7cfa04392 | /math/0x00-linear_algebra/8-ridin_bareback.py | 127a28653655529a75ab02fb1d7979c78586e571 | [] | no_license | dbaroli/holbertonschool-machine_learning | 354b57932b8882598bc3ba304fd2004ba4bca169 | 7f9a040f23eda32c5aa154c991c930a01b490f0f | refs/heads/master | 2023-01-21T11:50:59.796803 | 2020-11-24T02:03:51 | 2020-11-24T02:03:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | #!/usr/bin/env python3
"""function to multiply 2 matrices"""
def mat_mul(mat1, mat2):
""" multiply two matrices
Args:
mat1, mat2: Given matrices
Return:
the new mat: new_mat
"""
if len(mat1[0]) != len(mat2):
return None
else:
new_mat = []
for i in range(len(mat1)):
mat_i = []
for j in range(len(mat2[0])):
vec = 0
for k in range(len(mat2)):
vec += mat1[i][k] * mat2[k][j]
mat_i.append(vec)
new_mat.append(mat_i)
for x in new_mat:
return new_mat
| [
"931@holbertonschool.com"
] | 931@holbertonschool.com |
dd360585048649dfafbd491a46af9e01b08d22f9 | 48c522df53c37a7b9bea3d17f403556e01eeb24c | /ubuntu-20-04-scripts/build_tf_dataset/third/look_at_tokenized_att_disassembly.py | 5db7e41c41aaee82efce70e659362f1e6ba56cd3 | [] | no_license | flobotics/func_sign_prob | 32ee8a8e4d1e33bec9253cd21e7609827d370fc9 | 18000e04ea108fb2530a50a9de4f8996cde398f0 | refs/heads/master | 2023-02-10T19:08:01.536243 | 2021-01-02T23:25:38 | 2021-01-02T23:25:38 | 292,960,645 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,127 | py | import tarfile
import os
import sys
import pickle
def get_all_tar_filenames(tar_file_dir):
tar_files = list()
### check if dir exists
if not os.path.isdir(tar_file_dir):
return tar_files
files = os.listdir(tar_file_dir)
for f in files:
if f.endswith(".pickle"):
tar_files.append(f)
return tar_files
def get_pickle_file_content(full_path_pickle_file):
pickle_file = open(full_path_pickle_file,'rb')
pickle_list = pickle.load(pickle_file, encoding='latin1')
pickle_file.close()
return pickle_list
def print_one_pickle_list_item(pickle_file_content):
item = next(iter(pickle_file_content))
if item:
print(f'caller-and-callee-disassembly: {item[0]}')
print(f'return-type: {item[1]}')
else:
print('Error item[0]')
def main():
files = get_all_tar_filenames('/tmp/save_dir')
for file in files:
cont = get_pickle_file_content('/tmp/save_dir/' + file)
print_one_pickle_list_item(cont)
break
if __name__ == "__main__":
main()
| [
"ubu@ubu-VirtualBox"
] | ubu@ubu-VirtualBox |
5ae4af3e8cafe2a8f7f1d21cc8b9335b1ef170d2 | 3b9b4049a8e7d38b49e07bb752780b2f1d792851 | /src/third_party/WebKit/Tools/Scripts/webkitpy/common/webkit_finder.py | 3681938d8b0d413f85884a827054bea7f375f8a2 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft"
] | permissive | webosce/chromium53 | f8e745e91363586aee9620c609aacf15b3261540 | 9171447efcf0bb393d41d1dc877c7c13c46d8e38 | refs/heads/webosce | 2020-03-26T23:08:14.416858 | 2018-08-23T08:35:17 | 2018-09-20T14:25:18 | 145,513,343 | 0 | 2 | Apache-2.0 | 2019-08-21T22:44:55 | 2018-08-21T05:52:31 | null | UTF-8 | Python | false | false | 5,150 | py | # Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
class WebKitFinder(object):
def __init__(self, filesystem):
self._filesystem = filesystem
self._dirsep = filesystem.sep
self._sys_path = sys.path
self._env_path = os.environ['PATH'].split(os.pathsep)
self._webkit_base = None
self._chromium_base = None
self._depot_tools = None
def webkit_base(self):
"""Returns the absolute path to the top of the WebKit tree.
Raises an AssertionError if the top dir can't be determined."""
# Note: This code somewhat duplicates the code in
# scm.find_checkout_root(). However, that code only works if the top
# of the SCM repository also matches the top of the WebKit tree. Some SVN users
# (the chromium test bots, for example), might only check out subdirectories like
# Tools/Scripts. This code will also work if there is no SCM system at all.
if not self._webkit_base:
self._webkit_base = self._webkit_base
module_path = self._filesystem.abspath(self._filesystem.path_to_module(self.__module__))
tools_index = module_path.rfind('Tools')
assert tools_index != -1, "could not find location of this checkout from %s" % module_path
self._webkit_base = self._filesystem.normpath(module_path[0:tools_index - 1])
return self._webkit_base
def chromium_base(self):
if not self._chromium_base:
self._chromium_base = self._filesystem.dirname(self._filesystem.dirname(self.webkit_base()))
return self._chromium_base
def path_from_webkit_base(self, *comps):
return self._filesystem.join(self.webkit_base(), *comps)
def path_from_chromium_base(self, *comps):
return self._filesystem.join(self.chromium_base(), *comps)
def path_to_script(self, script_name):
"""Returns the relative path to the script from the top of the WebKit tree."""
# This is intentionally relative in order to force callers to consider what
# their current working directory is (and change to the top of the tree if necessary).
return self._filesystem.join("Tools", "Scripts", script_name)
def layout_tests_dir(self):
return self.path_from_webkit_base('LayoutTests')
def perf_tests_dir(self):
return self.path_from_webkit_base('PerformanceTests')
def depot_tools_base(self):
if not self._depot_tools:
# This basically duplicates src/build/find_depot_tools.py without the side effects
# (adding the directory to sys.path and importing breakpad).
self._depot_tools = (self._check_paths_for_depot_tools(self._sys_path) or
self._check_paths_for_depot_tools(self._env_path) or
self._check_upward_for_depot_tools())
return self._depot_tools
def _check_paths_for_depot_tools(self, paths):
for path in paths:
if path.rstrip(self._dirsep).endswith('depot_tools'):
return path
return None
def _check_upward_for_depot_tools(self):
fs = self._filesystem
prev_dir = ''
current_dir = fs.dirname(self._webkit_base)
while current_dir != prev_dir:
if fs.exists(fs.join(current_dir, 'depot_tools', 'pylint.py')):
return fs.join(current_dir, 'depot_tools')
prev_dir = current_dir
current_dir = fs.dirname(current_dir)
def path_from_depot_tools_base(self, *comps):
return self._filesystem.join(self.depot_tools_base(), *comps)
| [
"changhyeok.bae@lge.com"
] | changhyeok.bae@lge.com |
c61ebc49c8c624eea96ebf2bc480fef8cf9e104d | 40b262d813d07a113914d6009af8737898f2e096 | /Python/Python_Fundamentals/Mult_II.py | f775c6fa335690d1ec08e6dd9959ac91798f8e77 | [] | no_license | Nish8192/Python | cb6de3b96e790464a0a4ad10eda86ce4f79688b4 | 5c03beff6f3669d5cfb6b31c5749827db8b6a627 | refs/heads/master | 2020-12-23T16:56:18.301723 | 2017-05-27T02:09:02 | 2017-05-27T02:09:02 | 92,563,061 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | for i in range(5, 1000000):
if(i%5==0):
print i
| [
"nish8192@gmail.com"
] | nish8192@gmail.com |
a3b0135eafbb4459910a3e5211ee9f7b52ed51e5 | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.3/tests/regressiontests/humanize/__init__.py | d652b04ec8e450dd8b2fa9eecc3f102d9dce9577 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 108 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.3/tests/regressiontests/humanize/__init__.py | [
"ron.y.kagan@gmail.com"
] | ron.y.kagan@gmail.com |
b17666a09326f7efec2051a3498367140a3e60e5 | 547db7801930874bf8298304b7ae453695ab3751 | /zeabus_imaging_sonar/src/sonar_server_gate.py | f1460a2c779ca010d16068c44c1b386bed208eb8 | [] | no_license | skconan/zeabus2018 | 5fc764b5039799fa7d80f7e86345822b50282d2e | 16e2fc1a21daea813833f9afb89f64142b5b8682 | refs/heads/master | 2020-03-25T12:39:38.759263 | 2018-07-27T23:53:50 | 2018-07-27T23:53:50 | 143,786,404 | 0 | 1 | null | 2018-08-06T21:41:52 | 2018-08-06T21:41:52 | null | UTF-8 | Python | false | false | 1,055 | py | #!/usr/bin/env python
import sys
import cv2
import time
import numpy as np
import roslib
import rospy
import math
#from _sonar_msg import *
#from _sonar_srv import *
from zeabus_imaging_sonar.srv import sonar_gatesrv
sonar_image = None
def server(start):
global sonar_image
# try:
response = sonar_image(start)
print response
return response.theta, response.r, response.status
# except rospy.ServiceException, e:
# print "Service call failed: %s"%e
if __name__ == "__main__":
print "Waiting"
start = True
count = 0
# global sonar_image
rospy.wait_for_service('/sonar_image')
sonar_image = rospy.ServiceProxy('/sonar_image', sonar_gatesrv)
print 'service start'
while not rospy.is_shutdown():
count += 1
time.sleep(1)
response = sonar_image()
"""for i, (ri, thetai) in enumerate(zip(response.r, response.theta)):
thetai = thetai-90
print "(%d, %d)" %(ri, thetai)"""
#print (response.data.r)
print response
| [
"supakit.kr@gmail.com"
] | supakit.kr@gmail.com |
f75415b1c72053fc87600193f00e473c7bf954b2 | 0e94b21a64e01b992cdc0fff274af8d77b2ae430 | /pytorch/pytorch_11.py | 543d14d0138a272ccdee73aae7aae9665c94940a | [] | no_license | yangnaGitHub/LearningProcess | 1aed2da306fd98f027dcca61309082f42b860975 | 250a8b791f7deda1e716f361a2f847f4d12846d3 | refs/heads/master | 2020-04-15T16:49:38.053846 | 2019-09-05T05:52:04 | 2019-09-05T05:52:04 | 164,852,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,777 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 3 16:08:22 2019
@author: natasha_yang
@e-mail: ityangna0402@163.com
"""
#大的数据可以压缩传输下载之后再解压还原
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.utils.data as Data
import torchvision
import matplotlib.pyplot as plt
import numpy as np
####准备数据
train_data = torchvision.datasets.MNIST(
root='./mnist',#保存的地方
train=True,
transform=torchvision.transforms.ToTensor(),#转换成torch.FloatTensor(C,H,W),训练的时候 normalize到[0.0, 1.0]区间
download=False#没下载就下载,下载好了就是False
)
BATCH_SIZE = 64
train_loader = Data.DataLoader(train_data, batch_size=BATCH_SIZE, shuffle=True)
#AutoEncoder=>[encoder, decoder]=压缩后得到压缩的特征值,再从压缩的特征值解压成原图片
class AutoEncode(nn.Module):
def __init__(self):
super(AutoEncode, self).__init__()
self.encoder = nn.Sequential(
nn.Linear(28*28, 128),
nn.Tanh(),
nn.Linear(128, 64),
nn.Tanh(),
nn.Linear(64, 12),
nn.Tanh(),
nn.Linear(12, 3))
self.decoder = nn.Sequential(
nn.Linear(3, 12),
nn.Tanh(),
nn.Linear(12, 64),
nn.Tanh(),
nn.Linear(64, 128),
nn.Tanh(),
nn.Linear(128, 28*28),
nn.Sigmoid())
def forward(self, x_input):
encode = self.encoder(x_input)
decode = self.decoder(encode)
return encode, decode
autoencoder = AutoEncode()
####定义优化函数和LOSS
optimizer = torch.optim.Adam(autoencoder.parameters(), lr=0.005)
loss_func = nn.MSELoss()
N_TEST_IMG = 5#只展示前面的5个
f, a = plt.subplots(2, N_TEST_IMG, figsize=(5, 2))#2行5列
view_data = train_data.train_data[:N_TEST_IMG].view(-1, 28*28).type(torch.FloatTensor)/255.
#第一行
for index in range(N_TEST_IMG):
a[0][index].imshow(np.reshape(view_data.data.numpy()[index], (28, 28)), cmap='gray')
a[0][index].set_xticks(())
a[0][index].set_yticks(())
for epoch in range(10):
for step, (batch_x, batch_y) in enumerate(train_loader):
batch_x, batch_x_d = Variable(batch_x.view(-1, 28*28)),Variable(batch_x.view(-1, 28*28))
#batch_y = Variable(batch_y)
encode, decode = autoencoder(batch_x)
loss = loss_func(decode, batch_x_d)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if 0 == step % 100:
print('Epoch: ', epoch, '| train loss: %.4f' % loss.data.numpy())
_, decoded_data = autoencoder(view_data)
#第二行
for index in range(N_TEST_IMG):
a[1][index].clear()
a[1][index].imshow(np.reshape(decoded_data.data.numpy()[index], (28, 28)), cmap='gray')
a[1][index].set_xticks(())
a[1][index].set_yticks(())
plt.draw()
plt.pause(0.05)
plt.ioff()
plt.show()
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
view_data = train_data.train_data[:200].view(-1, 28*28).type(torch.FloatTensor)/255.
encoded_data, _ = autoencoder(view_data)
fig = plt.figure(2)
ax = Axes3D(fig)
DX, DY, DZ = encoded_data.data[:, 0].numpy(), encoded_data.data[:, 1].numpy(), encoded_data.data[:, 2].numpy()
values = train_data.train_labels[:200].numpy()
for dx, dy, dz, dv in zip(DX, DY, DX, values):
cb = cm.rainbow(int(255*dv/9))#颜色
ax.text(dx, dy, dz, dv, backgroundcolor=cb)
ax.set_xlim(DX.min(), DX.max())
ax.set_ylim(DY.min(), DY.max())
ax.set_zlim(DZ.min(), DZ.max())
plt.show()
| [
"ityangna0402@163.com"
] | ityangna0402@163.com |
251b9960cc43b1d787b4b9b6168c4e25c2192138 | c4bb6b11e9bec547bca18cf3f1c2ac0bcda6d0b0 | /halld/test/resource.py | 17cc4c6b89c88ace5893dbaa716b05ec7a8980e4 | [] | no_license | ox-it/halld | 43e8fb38a12977bc4f2323a18fdae42683c5a494 | 917012265548b76941658a56ec83dfe2cafc8e4a | refs/heads/master | 2021-01-16T01:01:26.355534 | 2015-05-05T20:36:08 | 2015-05-05T20:36:08 | 16,612,456 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,645 | py | import json
import unittest
from rest_framework.test import force_authenticate
from .base import TestCase
from .. import models
from .. import response_data
class ResourceListTestCase(TestCase):
def create_resources(self):
# Will be defunct by default.
self.defunct_resource = models.Resource.create(self.superuser, 'snake')
self.extant_resource = models.Resource.create(self.superuser, 'snake')
models.Source.objects.create(resource=self.extant_resource,
type_id='science',
data={'foo': 'bar'},
author=self.superuser,
committer=self.superuser)
self.assertEqual(self.extant_resource.extant, True)
def testGetResourceList(self):
request = self.factory.get('/snake')
force_authenticate(request, self.anonymous_user)
response = self.resource_list_view(request, 'snake')
self.assertIsInstance(response.data, response_data.ResourceList)
def testDefunctResources(self):
self.create_resources()
request = self.factory.get('/snake?defunct=on&extant=off')
request.user = self.anonymous_user
response = self.resource_list_view(request, 'snake')
self.assertIsInstance(response.data, response_data.ResourceList)
self.assertEqual(response.data['paginator'].count, 1)
self.assertEqual(next(response.data.resource_data)['self']['href'],
self.defunct_resource.href)
def testExtantResources(self):
self.create_resources()
request = self.factory.get('/snake')
force_authenticate(request, self.anonymous_user)
response = self.resource_list_view(request, 'snake')
self.assertIsInstance(response.data, response_data.ResourceList)
self.assertEqual(response.data['paginator'].count, 1)
self.assertEqual(next(response.data.resource_data)['self']['href'],
self.extant_resource.href)
class ResourceDetailTestCase(TestCase):
def testViewResource(self):
resource = models.Resource.create(self.superuser, 'snake')
resource.data = {'title': 'Python'}
resource.save(regenerate=False)
request = self.factory.get('/snake/' + resource.identifier,
headers={'Accept': 'application/hal+json'})
force_authenticate(request, self.anonymous_user)
response = self.resource_detail_view(request, 'snake', resource.identifier)
self.assertEqual(response.data.data.get('title'), 'Python')
| [
"alexander.dutton@it.ox.ac.uk"
] | alexander.dutton@it.ox.ac.uk |
12b6cb094a4322a405a3ee6b7bc100899285342d | ade82efdb6dfaa402ea5999698de10b29ba142c0 | /30DaysOfCode/day26-nested-logic.py | 57968d3c73a6952bb69ae3f066ae5174146ab56d | [] | no_license | markronquillo/programming-challenges-and-algorithms | 057747a3a9311d61b9a8f7bf6048ea95bb533a2e | 0deb686069cd82bd0c7d0bf7e03aabd7f1359da3 | refs/heads/master | 2021-01-12T05:18:23.637189 | 2017-12-02T14:45:29 | 2017-12-02T14:45:29 | 77,906,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py |
act_date = input()
exp_date = input()
def compute(act_date, exp_date):
act_day, act_month, act_year = [int(x) for x in act_date.split(' ')]
exp_day, exp_month, exp_year = [int(x) for x in exp_date.split(' ')]
if act_year > exp_year:
return 10000
elif act_month > exp_month:
if act_year < exp_year:
return 0
return 500 * (act_month - exp_month)
elif act_day > exp_day:
if act_month < exp_month:
return 0
return 15 * (act_day - exp_day)
else:
return 0
print(compute(act_date, exp_date))
| [
"markronquillo23@gmail.com"
] | markronquillo23@gmail.com |
43352cd049ced563696e7de3b487e343123b42d3 | 42f6b269f1baa87e295bd2281fb5a7e975b21ac7 | /04.For_Loop/03.More_Exercises/probe.py | 9909f6841f03786bf4604e4523fdbc5ad29f3626 | [] | no_license | Tuchev/Python-Basics---september---2020 | 54dab93adb41aa998f2c043f803f4751bbc8c758 | 8899cc7323611dd6eed00aa0550071a49dc72704 | refs/heads/main | 2023-06-19T11:48:11.607069 | 2021-07-15T13:33:02 | 2021-07-15T13:33:02 | 386,295,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | n = int(input())
even_sum = 0
odd_sum = 0
for number in range(1, n + 1):
current_number = int(input())
if number % 2 == 0
even_sum += current_number
else:
odd_sum += current_number
| [
"noreply@github.com"
] | Tuchev.noreply@github.com |
01d3dbef5745f091babf67730a13213939cded7b | 245b92f4140f30e26313bfb3b2e47ed1871a5b83 | /airflow/providers/google_vendor/googleads/v12/services/types/bidding_seasonality_adjustment_service.py | 9627a896feddaeef267b36c368273ea14a1ac92e | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ephraimbuddy/airflow | 238d6170a0e4f76456f00423124a260527960710 | 3193857376bc2c8cd2eb133017be1e8cbcaa8405 | refs/heads/main | 2023-05-29T05:37:44.992278 | 2023-05-13T19:49:43 | 2023-05-13T19:49:43 | 245,751,695 | 2 | 1 | Apache-2.0 | 2021-05-20T08:10:14 | 2020-03-08T04:28:27 | null | UTF-8 | Python | false | false | 7,069 | py | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from airflow.providers.google_vendor.googleads.v12.enums.types import (
response_content_type as gage_response_content_type,
)
from airflow.providers.google_vendor.googleads.v12.resources.types import (
bidding_seasonality_adjustment as gagr_bidding_seasonality_adjustment,
)
from google.protobuf import field_mask_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package="airflow.providers.google_vendor.googleads.v12.services",
marshal="google.ads.googleads.v12",
manifest={
"MutateBiddingSeasonalityAdjustmentsRequest",
"BiddingSeasonalityAdjustmentOperation",
"MutateBiddingSeasonalityAdjustmentsResponse",
"MutateBiddingSeasonalityAdjustmentsResult",
},
)
class MutateBiddingSeasonalityAdjustmentsRequest(proto.Message):
r"""Request message for
[BiddingSeasonalityAdjustmentService.MutateBiddingSeasonalityAdjustments][google.ads.googleads.v12.services.BiddingSeasonalityAdjustmentService.MutateBiddingSeasonalityAdjustments].
Attributes:
customer_id (str):
Required. ID of the customer whose
seasonality adjustments are being modified.
operations (Sequence[google.ads.googleads.v12.services.types.BiddingSeasonalityAdjustmentOperation]):
Required. The list of operations to perform
on individual seasonality adjustments.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v12.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operations = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="BiddingSeasonalityAdjustmentOperation",
)
partial_failure = proto.Field(proto.BOOL, number=3,)
validate_only = proto.Field(proto.BOOL, number=4,)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class BiddingSeasonalityAdjustmentOperation(proto.Message):
r"""A single operation (create, remove, update) on a seasonality
adjustment.
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
create (google.ads.googleads.v12.resources.types.BiddingSeasonalityAdjustment):
Create operation: No resource name is
expected for the new seasonality adjustment.
This field is a member of `oneof`_ ``operation``.
update (google.ads.googleads.v12.resources.types.BiddingSeasonalityAdjustment):
Update operation: The seasonality adjustment
is expected to have a valid resource name.
This field is a member of `oneof`_ ``operation``.
remove (str):
Remove operation: A resource name for the removed
seasonality adjustment is expected, in this format:
``customers/{customer_id}/biddingSeasonalityAdjustments/{seasonality_adjustment_id}``
This field is a member of `oneof`_ ``operation``.
"""
update_mask = proto.Field(
proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof="operation",
message=gagr_bidding_seasonality_adjustment.BiddingSeasonalityAdjustment,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof="operation",
message=gagr_bidding_seasonality_adjustment.BiddingSeasonalityAdjustment,
)
remove = proto.Field(proto.STRING, number=3, oneof="operation",)
class MutateBiddingSeasonalityAdjustmentsResponse(proto.Message):
r"""Response message for seasonality adjustments mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (for example, auth errors), we return
an RPC level error.
results (Sequence[google.ads.googleads.v12.services.types.MutateBiddingSeasonalityAdjustmentsResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE, number=3, message=status_pb2.Status,
)
results = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="MutateBiddingSeasonalityAdjustmentsResult",
)
class MutateBiddingSeasonalityAdjustmentsResult(proto.Message):
r"""The result for the seasonality adjustment mutate.
Attributes:
resource_name (str):
Returned for successful operations.
bidding_seasonality_adjustment (google.ads.googleads.v12.resources.types.BiddingSeasonalityAdjustment):
The mutated bidding seasonality adjustment with only mutable
fields after mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
bidding_seasonality_adjustment = proto.Field(
proto.MESSAGE,
number=2,
message=gagr_bidding_seasonality_adjustment.BiddingSeasonalityAdjustment,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"noreply@github.com"
] | ephraimbuddy.noreply@github.com |
2727043746f5618d29fb60b6cdf111942c7228a5 | 82762d776e2400948af54ca2e1bdf282885d922c | /581. 最短无序连续子数组.py | 8bc3cb543d1ee49d622254c2d89f338667666d2e | [] | no_license | dx19910707/LeetCode | f77bab78bcba2d4002c9662c122b82fc3c9caa46 | 624975f767f6efa1d7361cc077eaebc344d57210 | refs/heads/master | 2020-03-17T02:50:46.546878 | 2019-06-25T09:22:13 | 2019-06-25T09:22:13 | 133,208,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | class Solution(object):
def findUnsortedSubarray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
nums2 = sorted(nums)
if nums == nums2:
return 0
i = 0
while nums[i] == nums2[i]:
i += 1
j = -1
while nums[j] == nums2[j]:
j -= 1
return len(nums[i:j])+1 | [
"dx19910707@qq.com"
] | dx19910707@qq.com |
0b15834e4571dd2343ca5edbb518650202427a7e | a301841f1ab55ab7d5fa496c4dafd2c50e01b8ed | /labour/views/public_views.py | 9956d32f2053fcde90e6c3931878d9c81cb84b59 | [
"MIT"
] | permissive | wyrmiyu/kompassi | ab47dd27c7cac97e0695a9faa0d0994343eeb98b | e089a235bcd74b6a31b34572a56f4947bebc5e60 | refs/heads/master | 2021-01-17T15:04:50.294808 | 2015-03-16T21:19:40 | 2015-03-16T21:19:40 | 16,747,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,225 | py | # encoding: utf-8
import json
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import Http404
from django.shortcuts import get_object_or_404, render, redirect
from django.utils.timezone import now
from django.views.decorators.http import require_http_methods
from core.helpers import person_required
from core.models import Event, Person
from core.utils import (
initialize_form,
login_redirect,
page_wizard_clear,
page_wizard_init,
page_wizard_vars,
url,
set_attrs,
)
from ..forms import SignupForm
from ..models import (
AlternativeSignupForm,
JobCategory,
LabourEventMeta,
PersonQualification,
Qualification,
Signup,
)
from ..helpers import labour_event_required
from .view_helpers import initialize_signup_forms
# XXX hackish
def qualifications_related():
result = []
for qual in Qualification.objects.all():
# wouldn't need labour_person_(dis)qualify_view if they used POST as they should
for view_name in ['labour_person_qualification_view', 'labour_person_qualify_view', 'labour_person_disqualify_view']:
result.append(url(view_name, qual.slug))
return result
@labour_event_required
@require_http_methods(['GET', 'POST'])
def labour_signup_view(request, event, alternative_form_slug=None):
"""
This is the "gate" function. The implementation is in
`actual_labour_signup_view`.
The purpose of this function is to redirect new users through the process
of registering an account, entering qualifications and only then signing up.
Existing users are welcomed to sign up right away.
"""
if alternative_form_slug:
actual_signup_url = url('labour_special_signup_view', event.slug, alternative_form_slug)
else:
actual_signup_url = url('labour_signup_view', event.slug)
if not request.user.is_authenticated():
pages = [
('core_login_view', u'Sisäänkirjautuminen'),
('core_registration_view', u'Rekisteröityminen'),
('labour_qualifications_view', u'Pätevyydet', qualifications_related()),
(actual_signup_url, u'Ilmoittautuminen'),
]
page_wizard_init(request, pages)
return login_redirect(request)
try:
person = request.user.person
except Person.DoesNotExist:
pages = [
('core_personify_view', u'Perustiedot'),
('labour_qualifications_view', u'Pätevyydet', qualifications_related()),
(actual_signup_url, u'Ilmoittautuminen'),
]
page_wizard_init(request, pages)
return redirect('core_personify_view')
return actual_labour_signup_view(request, event, alternative_form_slug=alternative_form_slug)
def actual_labour_signup_view(request, event, alternative_form_slug):
    """Render and process the labour signup form for `event`.

    `labour_signup_view` acts as the gate in front of this view; by the time
    we get here the user is authenticated and has a Person.
    """
    vars = page_wizard_vars(request)
    signup = event.labour_event_meta.get_signup_for_person(request.user.person)
    # Resolve which signup form to use: the one named in the URL, the one an
    # existing signup was made with, or the event default.
    if alternative_form_slug is not None:
        # Alternative signup form specified via URL
        alternative_signup_form = get_object_or_404(AlternativeSignupForm, event=event, slug=alternative_form_slug)
        # An existing signup may only be edited with the form it was made with.
        if (
            signup.alternative_signup_form_used is not None and \
            signup.alternative_signup_form_used.pk != alternative_signup_form.pk
        ):
            messages.error(request, u'Hakemusta ei ole tehty käyttäen tätä lomaketta.')
            return redirect('core_event_view', event.slug)
    elif signup.pk is not None and signup.alternative_signup_form_used is not None:
        # Alternative signup form used to sign up
        alternative_signup_form = signup.alternative_signup_form_used
    else:
        # Use default signup form
        alternative_signup_form = None
    if alternative_signup_form is not None:
        # Using an alternative signup form
        if not alternative_signup_form.is_active:
            messages.error(request, u'Pyytämäsi ilmoittautumislomake ei ole käytössä.')
            return redirect('core_event_view', event.slug)
        if alternative_signup_form.signup_message:
            messages.warning(request, alternative_signup_form.signup_message)
        SignupFormClass = alternative_signup_form.signup_form_class
        SignupExtraFormClass = alternative_signup_form.signup_extra_form_class
    else:
        # Using default signup form
        if not event.labour_event_meta.is_registration_open:
            messages.error(request, u'Ilmoittautuminen tähän tapahtumaan ei ole avoinna.')
            return redirect('core_event_view', event.slug)
        if event.labour_event_meta.signup_message:
            messages.warning(request, event.labour_event_meta.signup_message)
        # None makes initialize_signup_forms fall back to its defaults.
        SignupFormClass = None
        SignupExtraFormClass = None
    # Processed signups are frozen; direct the user to the labour team instead.
    if signup.is_processed:
        messages.error(request,
            u'Hakemuksesi on jo käsitelty, joten et voi enää muokata sitä. '
            u'Tarvittaessa ota yhteyttä työvoimatiimiin.'
        )
        return redirect('core_event_view', event.slug)
    # NOTE(review): `old_state` is assigned but not used in this view.
    if signup.pk is not None:
        old_state = signup.state
        submit_text = 'Tallenna muutokset'
    else:
        old_state = None
        submit_text = 'Lähetä ilmoittautuminen'
    signup_extra = signup.signup_extra
    signup_form, signup_extra_form = initialize_signup_forms(request, event, signup,
        SignupFormClass=SignupFormClass,
        SignupExtraFormClass=SignupExtraFormClass,
    )
    if request.method == 'POST':
        if signup_form.is_valid() and signup_extra_form.is_valid():
            if signup.pk is None:
                message = u'Kiitos ilmoittautumisestasi!'
            else:
                message = u'Ilmoittautumisesi on päivitetty.'
            if alternative_signup_form is not None:
                signup.alternative_signup_form_used = alternative_signup_form
            # Fields the form excludes still need values: fill their defaults
            # in before saving.
            set_attrs(signup, **signup_form.get_excluded_field_defaults())
            set_attrs(signup_extra, **signup_extra_form.get_excluded_field_defaults())
            signup = signup_form.save()
            signup_extra.signup = signup
            signup_extra = signup_extra_form.save()
            if alternative_signup_form is not None:
                # Save m2m field defaults
                # (m2m defaults can only be applied once the instances have pks)
                for obj, form in [
                    (signup, signup_form),
                    (signup_extra, signup_extra_form),
                ]:
                    defaults = form.get_excluded_m2m_field_defaults()
                    if defaults:
                        set_attrs(obj, **defaults)
                        obj.save()
            signup.apply_state()
            messages.success(request, message)
            return redirect('core_event_view', event.slug)
        else:
            messages.error(request, u'Ole hyvä ja korjaa virheelliset kentät.')
    # Job category data for the template; only public categories are offered.
    all_job_cats = JobCategory.objects.filter(event=event)
    job_cats = JobCategory.objects.filter(event=event, public=True)
    # FIXME use id and data attr instead of category name
    non_qualified_category_names = [
        jc.name for jc in job_cats
        if not jc.is_person_qualified(request.user.person)
    ]
    vars.update(
        event=event,
        signup_form=signup_form,
        signup_extra_form=signup_extra_form,
        submit_text=submit_text,
        alternative_signup_form=alternative_signup_form,
        # XXX HACK descriptions injected using javascript
        job_descriptions_json=json.dumps(dict((cat.pk, cat.description) for cat in all_job_cats)),
        non_qualified_category_names_json=json.dumps(non_qualified_category_names),
    )
    return render(request, 'labour_signup_view.jade', vars)
@person_required
def labour_profile_signups_view(request):
    """Show the user's labour signups, bucketed into past/current/future events."""
    signups = request.user.person.signup_set
    moment = now()
    newest_first = '-event__start_time'
    past = signups.filter(event__end_time__lte=moment).order_by(newest_first)
    current = signups.filter(event__start_time__lte=moment, event__end_time__gt=moment).order_by(newest_first)
    future = signups.filter(event__start_time__gt=moment).order_by(newest_first)
    vars = dict(
        signups_past_events=past,
        signups_current_events=current,
        signups_future_events=future,
        no_signups=not any(qs.exists() for qs in (past, current, future)),
        all_signups=signups.all(),
    )
    return render(request, 'labour_profile_signups_view.jade', vars)
@person_required
def labour_qualifications_view(request):
    """List the person's current qualifications plus the ones still available."""
    vars = page_wizard_vars(request)
    held = request.user.person.personqualification_set.all()
    held_pks = [pq.qualification.pk for pq in held]
    vars.update(
        person_qualifications=held,
        available_qualifications=Qualification.objects.exclude(pk__in=held_pks),
    )
    # Inside the new-user page wizard a leaner template is used.
    template_name = (
        'labour_new_user_qualifications_view.jade'
        if 'page_wizard' in vars
        else 'labour_profile_qualifications_view.jade'
    )
    return render(request, template_name, vars)
@person_required
@require_http_methods(['GET', 'POST'])
def labour_person_qualification_view(request, qualification):
    """Show and save a single qualification, with its extra-info form if any.

    `qualification` is the Qualification slug. GET renders the form; POST
    creates/updates the PersonQualification and its extra model.
    """
    vars = page_wizard_vars(request)
    person = request.user.person
    qualification = get_object_or_404(Qualification, slug=qualification)
    # Reuse an existing PersonQualification, or build an unsaved one so the
    # template has an object either way.
    try:
        person_qualification = qualification.personqualification_set.get(person=person)
    except PersonQualification.DoesNotExist:
        person_qualification = PersonQualification(
            person=person,
            qualification=qualification
        )
    # Some qualifications carry an extra per-person model (e.g. licence info);
    # if present, bind its model form.
    QualificationExtra = qualification.qualification_extra_model
    if QualificationExtra:
        QualificationExtraForm = QualificationExtra.get_form_class()
        qualification_extra = person_qualification.qualification_extra
        form = initialize_form(QualificationExtraForm, request, instance=qualification_extra)
    else:
        qualification_extra = None
        form = None
    if request.method == 'POST':
        # With no extra form the POST is always considered valid.
        form_valid = not form or (form and form.is_valid())
        if form_valid:
            # Save parent first so the extra row can point at it.
            person_qualification.save()
            if form:
                qualification_extra.personqualification = person_qualification
                form.save()
            messages.success(request, u'Pätevyys tallennettiin.')
            return redirect('labour_qualifications_view')
        else:
            messages.error(request, u'Ole hyvä ja korjaa lomakkeen virheet.')
    vars.update(
        person_qualification=person_qualification,
        form=form
    )
    # The new-user wizard uses a leaner template.
    if 'page_wizard' in vars:
        template_name = 'labour_new_user_person_qualification_view.jade'
    else:
        template_name = 'labour_profile_person_qualification_view.jade'
    return render(request, template_name, vars)
@person_required
def labour_person_qualify_view(request, qualification):
    """Grant the named qualification to the current user (GET-based quick add)."""
    qualification = get_object_or_404(Qualification, slug=qualification)
    # Qualifications that carry extra fields must go through the form view.
    if qualification.qualification_extra_model:
        return redirect('labour_person_qualification_view', qualification.slug)
    _, created = PersonQualification.objects.get_or_create(
        person=request.user.person,
        qualification=qualification,
    )
    # Only announce success when something actually changed.
    if created:
        messages.success(request, u"Pätevyys lisättiin.")
    return redirect('labour_qualifications_view')
@person_required
def labour_person_disqualify_view(request, qualification):
    """Remove the named qualification from the current user (GET-based quick remove).

    Always redirects back to the qualifications list; attempting to remove a
    qualification the user does not hold is a silent no-op, as before.
    """
    person = request.user.person
    qualification = get_object_or_404(Qualification, slug=qualification)
    # Previously the whole body sat under a bare `except: pass`, which also
    # swallowed unrelated failures (including the Http404 from the lookup and
    # any error from delete()). Catch only the expected "not qualified" case.
    try:
        person_qualification = PersonQualification.objects.get(
            person=person, qualification=qualification)
    except PersonQualification.DoesNotExist:
        pass
    else:
        person_qualification.delete()
        messages.success(request, u"Pätevyys poistettiin.")
    return redirect('labour_qualifications_view')
def labour_profile_menu_items(request):
    """Return (active, href, text) triples for the labour profile menu."""
    menu_spec = (
        ('labour_profile_signups_view', u"Työvoimahakemukset"),
        ('labour_qualifications_view', u"Pätevyydet"),
    )
    items = []
    for view_name, text in menu_spec:
        href = reverse(view_name)
        # A menu item is highlighted when the current path lives under it.
        items.append((request.path.startswith(href), href, text))
    return items
def labour_event_box_context(request, event):
    """Template context for the labour box shown on an event's front page."""
    signup = None
    is_labour_admin = False
    if request.user.is_authenticated():
        is_labour_admin = event.labour_event_meta.is_user_admin(request.user)
        try:
            # request.user.person may itself raise Person.DoesNotExist.
            signup = Signup.objects.get(event=event, person=request.user.person)
        except (Person.DoesNotExist, Signup.DoesNotExist):
            signup = None
    return dict(
        signup=signup,
        is_labour_admin=is_labour_admin,
    )
| [
"santtu@pajukanta.fi"
] | santtu@pajukanta.fi |
6a28f4f34572bb5d2680de4aa8031dcde01e6e9f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02846/s115010620.py | 03e500ab3f57d88caf928fb9d1dd60d08d26c229 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | t1, t2 = map(int, input().split())
# Per-phase speeds of the two runners; t1 and t2 (the phase durations) are
# read on the line just above this block.
a1, a2 = map(int, input().split())
b1, b2 = map(int, input().split())
# Net distance each runner covers over one full (t1 + t2) cycle.
ave_a = t1 * a1 + t2 * a2
ave_b = t1 * b1 + t2 * b2
# Normalize so that runner A is the one who gains ground per full cycle.
if ave_a < ave_b:
    (a1, a2), (b1, b2) = (b1, b2), (a1, a2)
    ave_a, ave_b = ave_b, ave_a
# Equal per-cycle totals: the gap returns to zero every cycle, so the
# runners meet infinitely often.
if ave_a == ave_b:
    print('infinity')
    exit()
# half: B's lead built up during the first phase of a cycle; all: A's net
# gain per full cycle. NOTE(review): `all` shadows the builtin of that name.
half, all = t1 * (b1 - a1), ave_a - ave_b
# Two meetings per full cycle while A is still behind mid-cycle, plus one
# extra if the deficit is not an exact multiple of the per-cycle gain.
ans = half // all * 2 + (half % all != 0)
print(max(0, ans))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
8943983fe54f8f93c3c1901374f3d729d104c32a | 79f871eccb9e7c87d60575b7147ec464964b010d | /api/tests/core/test_resource_paginated_datasets.py | a3f9185c1e43d855fff960be9f90b0e3feccd716 | [] | no_license | lucassimon/upload-datasets | 2c0e4d2945204503e8eefda5934eb8ca03561d1b | 02f2b7324f389b1940ee60c886650cb04dd5e80e | refs/heads/master | 2023-01-12T07:14:13.175505 | 2019-12-08T21:41:17 | 2019-12-08T21:41:17 | 226,594,431 | 0 | 0 | null | 2023-01-05T02:32:22 | 2019-12-08T00:33:52 | Python | UTF-8 | Python | false | false | 1,225 | py | import pytest
from unittest import mock
from apps.core.models import Dataset
from .factories import DatasetFactory
from tests.utils import replace_id_to_take_snapshot
class TestDataset:
    """Exercises GET /datasets/page/<n>, the paginated dataset listing."""

    ENDPOINT = "/datasets/page/1"

    def setup_method(self):
        # Deterministic factory sequences -> stable snapshots.
        DatasetFactory.reset_sequence()

    def teardown_method(self):
        # Drop everything the test inserted.
        Dataset.objects.delete()

    def make_request(self, client, query=None):
        return client.get(self.ENDPOINT, content_type="application/json")

    def test_should_response_dataset_paginated(self, auth, mongo, snapshot):
        DatasetFactory.create_batch(3)
        response = self.make_request(auth)
        assert response.status_code == 200
        payload = response.json
        # Ids are random per run; normalize them before snapshotting.
        payload["data"] = replace_id_to_take_snapshot(payload.get("data"))
        snapshot.assert_match(payload)

    @mock.patch("apps.core.repositories.Dataset.objects")
    def test_should_response_exception_when_an_error_raised(
        self, objects_mock, auth, mongo
    ):
        objects_mock.side_effect = Exception("Some error occurred")
        response = self.make_request(auth)
        assert response.status_code == 500
        assert response.json.get("description") == "Some error occurred"
| [
"lucassrod@gmail.com"
] | lucassrod@gmail.com |
17f33729a06fe96c12e6d2add6182124fa44708c | 184f13269249b08e5b62444ece10af8a3a35c9a5 | /python_nlp_explorations_chatbot_keywords_extraction/article_3_keyword_extraction_nlp_spacy/05_kw_extractor_spacy_linguistic_features.py | b09b4d1d37e43a4303f452073fb8f9226411fc4c | [
"MIT"
] | permissive | bflaven/BlogArticlesExamples | 3decf588098897b104d429054b8e44efec796557 | aca40bb33f1ad4e140ddd67d6bb39bdd029ef266 | refs/heads/master | 2023-09-04T16:57:57.498673 | 2023-09-01T13:14:12 | 2023-09-01T13:14:12 | 42,390,873 | 9 | 4 | MIT | 2023-03-02T22:39:06 | 2015-09-13T09:48:34 | HTML | UTF-8 | Python | false | false | 5,184 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
"""
cd /Users/brunoflaven/Documents/02_copy/_000_IA_bruno_light/_my_article_python-explorations/git_repo_python_explorations_nlp/article_3_keyword_extraction_nlp_spacy/
python 05_kw_extractor_spacy_linguistic_features.py
"""
import spacy
from collections import Counter
from string import punctuation
# download best-matching version of specific model for your spaCy installation
# python -m spacy download en_core_web_sm
# nlp = spacy.load("en_core_web_sm")
# doc = nlp("This is a sentence.")
# print(doc)
# download best-matching version of specific model for your spaCy installation
# python -m spacy download en_core_web_lg
nlp = spacy.load("en_core_web_lg")
# import en_core_web_lg
# nlp = en_core_web_lg.load()
def get_hotwords(text):
    """Return every lowercased token that looks like a keyword candidate.

    A candidate is a proper noun, adjective or noun that is neither a spaCy
    stop word nor a punctuation character. Tokens are returned in document
    order, duplicates included (the caller counts them with Counter).
    """
    wanted_pos = {'PROPN', 'ADJ', 'NOUN'}
    doc = nlp(text.lower())
    return [
        token.text
        for token in doc
        if token.text not in nlp.Defaults.stop_words
        and token.text not in punctuation
        and token.pos_ in wanted_pos
    ]
input_text = "My last post was a modest attempt in discovering Python potential with the help of Anaconda. Meanwhile, Artificial Intelligence (AI) was bewitched me 🙂 So, I gained confidence using Python in order to explore AI librairies such as NTLK, OpenCV or face_recognition from ageitgey (Adam Geitgey)." \
"You can find the files in my github account More on https://github.com/bflaven/BlogArticlesExamples/ in python_playing_with_facial_recognition" \
"This post is about a real issue that can be solved by IA. I believe that for any technical topic, rather than exploring AI documentation which in itself could be quite wastful. I always strive to tackle a real use case and apply as much as possible what I want to discover." \
"So, applying this principle, I have spotted facial recognition for pictures. Like many, I am iOS mobile Phone user and I saw facial recognition at work. I start to wonder: “How can apply those facial recognition principles in a backoffice?”. As I am currently a PO for a Backoffice, I am supposed to have a clear “vision” of my product but more than that I know perfectly its weaknesses, especially dealing with pictures." \
"The benefit of this simple question allows me not to stress about the details right away and instead quickly jot something down as a placeholder for a conversation between me, myself and I, a soliloquy. The intent was to not forget to ask all the right questions either upfront or somewhere down the road but do not prevent from starting a P.O.C." \
"Here is my shortlist that highlights some of the functional and nonfunctional concerns or requirements that I wanted to address. This list has driven all my decisions to obtain a pragmatic result without missing the goal." \
"Goal: How can apply those facial recognition principles in a Backoffice in order to improve image search results at the end?" \
"Short actions todolist:" \
"Browse unknown people images directory with Facial Recognition Script" \
"Detect known people faces among these images set them aside" \
"Insert these known people images in a MySQL database" \
"Requirement: This Spike will be all made in Python but I do not want to end up with an overzealous shopping list of requirements." \
"Let me explain the situation. In the Backoffice, which I am dealing with, is hosting thousand of “unqualified” images that contains faces of: Donald Trump, Xi Jinping, Angela Merkel, Boris Johnson, Emmanuel Macron, Vladimir Poutine, Recep Tayyip Erdoğan or less kwnown poeple in an european centric point of view: Macky Sall, Rodrigo Duterte, Ramzan Kadyrov, Hun Sen, Narendra Modi, Hassan Rohani, Stevo Pendarovski, Nicolás Maduro, Edgar Lungu..." \
"Remember that we still need a human intelligence to say who is who? For your information, Stevo Pendarovski, Стево Пендаровски, is president of North Macedonia, holding the office since 12 May 2019 and he looks a bit like me 🙂 or that is just the glasses." \
"The face of Stevo Pendarovski, Стево Пендаровски, president of North Macedonia" \
"Improve a CMS's photos library qualification with AI, facial recognition in python, to provide better images search results to users" \
"The idea is to increase the relevance of research and reduce the sample of images that will be retrieved with a traditionnal textual search based on their name. It will save time, money and resources but also improve user experience. So, user do not get no results at all or improper results in his/her image search." \
"The fact, no one wants to qualify each image by opening them one after the other, adding correct caption to improve indexation and by consequence future’s search results. We are talking about more than 1 500 000 pictures. Indeed, the wise choice is to leave it to a computer." \
output = get_hotwords(input_text)
print ("\n --- output")
print(output)
print ("\n --- result for hashtags")
hashtags = [('#' + x[0]) for x in Counter(output).most_common(5)]
print(' '.join(hashtags))
| [
"bflaven@gmail.com"
] | bflaven@gmail.com |
39e5c44826e144c6b0f533369a9640dbf2dbe45d | 7749f7156195e4f5d75ccc72d27b71951698f783 | /mysite/settings.py | d4fb049d99717d256b8dbd3e2e03168db9a20312 | [] | no_license | HiFaMi/my-first-blog | 9dddad3354b26b39cff5dc5cf9c60421c765bba5 | a7dbcc5415c0f903a6ef1691dbd7a6e52cb1b64f | refs/heads/master | 2021-04-15T14:49:06.861837 | 2018-03-21T14:05:12 | 2018-03-21T14:05:12 | 126,188,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,106 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.11.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load it
# from the environment before any non-local deployment.
SECRET_KEY = '4-8*hlo3)kb8^pjf5h&uwvfno%k6d#=*7h4szb&f8#iw^j45_4'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG=True with an empty ALLOWED_HOSTS is development-only.
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'blog',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'ko'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"rlaalsrbgk@gmail.com"
] | rlaalsrbgk@gmail.com |
3f9ffcbd11f2b59511325d196baa1f17d53eccc4 | ae69d0402e48f07752dbe9b37d3e814fb314f0ba | /scripts/rltest/main.py | ebb6d94ab31a37bbfecaf9971e9335802be7a4b1 | [] | no_license | mfkasim1/deepmk | e07600828ec929fc397516f90abbfd86f7ac56c2 | 392964790d67d0f02b981aaba2d887c3f5a14551 | refs/heads/master | 2023-04-26T07:57:30.286946 | 2021-05-25T08:08:26 | 2021-05-25T08:08:26 | 147,033,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | import deepmk
import torch
import gym
import torch.nn as nn
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
import deepmk.rl.dataloaders as rlloader
from deepmk.rl.trainers import QLearn
from deepmk.rl.actors import QNet
from deepmk.rl import train, show
# set up the training components
# Classic CartPole control task; 4-dim observation, discrete actions.
env = gym.make("CartPole-v0")
# Small Q-network: observation -> hidden(100) -> one Q-value per action.
model = nn.Sequential(
    nn.Linear(4, 100),
    nn.Sigmoid(),
    nn.Linear(100, env.action_space.n)
)
# Epsilon-greedy actor wrapping the Q-network.
actor = QNet(model, epsilon=0.1)
optimizer = optim.SGD(model.parameters(),
                      lr=1e-2, momentum=0.01)
# Replay buffer loader (currently not passed to the trainer -- see below).
rldataloader = rlloader.ReplayMemoryLoader(batch_size=10, shuffle=True)
trainer = QLearn(actor, optimizer, gamma=0.99,
                 # rldataloader=rldataloader
                 )
# scheduler = lr_scheduler.StepLR(optimizer, step_size=100, gamma=0.1)
# train the model
# Trains for 10000 episodes, validating every 20; best weights are saved to
# "cartpole1.pkl".
model = train(env, trainer, model, actor,
              reward_preproc=lambda x:x, scheduler=None, num_episodes=10000,
              val_every=20, val_episodes=10, verbose=1, plot=1,
              save_wts_to="cartpole1.pkl")
# show(env, model, actor, load_wts_from="cartpole.pkl")
| [
"firman.kasim@gmail.com"
] | firman.kasim@gmail.com |
736d0d735cacf199d75cdf98f81f5ed5fd8aa6fb | 380a47268c5975473a2e7c38c747bc3bdbd981b1 | /benchmark/third_party/DeepSpeed/deepspeed/autotuning/tuner/cost_model.py | 0cdcef6483b4417e033f6e130c81eba7dc7b7d47 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | FMInference/FlexGen | 07aa9b1918c19b02077e13ad07e76840843810dd | d34f7b4b43ed87a374f394b0535ed685af66197b | refs/heads/main | 2023-07-24T02:29:51.179817 | 2023-07-21T22:38:31 | 2023-07-21T22:38:31 | 602,270,517 | 6,821 | 411 | Apache-2.0 | 2023-07-07T22:59:24 | 2023-02-15T21:18:53 | Python | UTF-8 | Python | false | false | 1,723 | py | from .utils import *
try:
import xgboost as xgb
except ImportError:
xgb = None
class XGBoostCostModel():
def __init__(self, loss_type, num_threads=None, log_interval=25, upper_model=None):
assert xgb is not None, "missing requirements, please install deepspeed w. 'autotuning_ml' extra."
self.loss_type = loss_type
if loss_type == "reg":
self.xgb_params = {
"max_depth": 3,
"gamma": 0.0001,
"min_child_weight": 1,
"subsample": 1.0,
"eta": 0.3,
"lambda": 1.0,
"alpha": 0,
"objective": "reg:linear",
}
elif loss_type == "rank":
self.xgb_params = {
"max_depth": 3,
"gamma": 0.0001,
"min_child_weight": 1,
"subsample": 1.0,
"eta": 0.3,
"lambda": 1.0,
"alpha": 0,
"objective": "rank:pairwise",
}
else:
raise RuntimeError("Invalid loss type: " + loss_type)
self.xgb_params["verbosity"] = 0
if num_threads:
self.xgb_params["nthread"] = num_threads
def fit(self, xs, ys):
x_train = np.array(xs, dtype=np.float32)
y_train = np.array(ys, dtype=np.float32)
y_max = np.max(y_train)
y_train = y_train / max(y_max, 1e-9)
index = np.random.permutation(len(x_train))
dtrain = xgb.DMatrix(x_train[index], y_train[index])
self.bst = xgb.train(self.xgb_params, dtrain)
def predict(self, xs):
features = xgb.DMatrix(xs)
return self.bst.predict(features)
| [
"sqy1415@gmail.com"
] | sqy1415@gmail.com |
63601989f71d582a2eec18528e01b6995527a9de | 2aba62d66c2c622bdc148cef451da76cae5fd76c | /exercise/learn_python_dm2039/ch30/ch30_6.py | 2d0948851697dcf6dd8ee2c7187bb6e577979064 | [] | no_license | NTUT-109AB8011/crawler | 6a76de2ab1848ebc8365e071e76c08ca7348be62 | a703ec741b48d3af615a757fed7607b1f8eb66a6 | refs/heads/master | 2023-03-26T22:39:59.527175 | 2021-03-30T03:29:22 | 2021-03-30T03:29:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | # ch30_6.py
import datetime
# A fixed offset of 100 days.
deltaTime = datetime.timedelta(days=100)
# Current local date and time.
timeNow = datetime.datetime.now()
# Prints the current time and the time 100 days from now.
print("現在時間是 : ", timeNow)
print("100天後是 : ", timeNow + deltaTime)
| [
"terranandes@gmail.com"
] | terranandes@gmail.com |
513aefbd73b1950b907fd41d6a90aed13b4154f3 | 71d3555e12cb53ea81b097dfdf760d4aae8830d9 | /triton_transformer/transformer.py | cec34fb30038a73fac4b467207a3d7c094c4c56a | [
"MIT"
] | permissive | armheb/triton-transformer | 35cc60d8842503a71f43545007ff83197a93dc43 | 21b8490848ff00906b4e688e22017508580e5ec7 | refs/heads/main | 2023-08-27T14:37:36.483284 | 2021-10-04T21:39:35 | 2021-10-04T21:39:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,166 | py | import torch
from torch import nn, einsum
import torch.nn.functional as F
from einops import rearrange
from triton_transformer.layernorm import layernorm
from triton_transformer.softmax import softmax
from triton_transformer.cross_entropy import cross_entropy_fn
from triton_transformer.bmm import fused_relu_squared
from triton_transformer.dropout import dropout_fn
from triton_transformer.utils import exists, default
# helpers classes
class Attention(nn.Module):
    """Multi-head self-attention whose layernorm / softmax / dropout
    primitives may dispatch to Triton kernels (`use_triton`, overridable
    per forward call)."""
    def __init__(
        self,
        dim,
        dim_head = 64,
        heads = 8,
        dropout = 0.,
        use_triton = False
    ):
        super().__init__()
        inner_dim = dim_head * heads
        self.use_triton = use_triton
        self.heads = heads
        self.scale = dim_head ** -0.5
        self.dropout = dropout

        self.norm = nn.LayerNorm(dim)
        self.to_qkv = nn.Linear(dim, inner_dim * 3, bias = False)
        self.to_out = nn.Linear(inner_dim, dim)

    def forward(self, x, mask = None, use_triton = None):
        use_triton = default(use_triton, self.use_triton)

        # Pre-norm, then project to queries/keys/values in a single matmul.
        x = layernorm(x, self.norm.weight, self.norm.bias, use_triton = use_triton)
        qkv = self.to_qkv(x).chunk(3, dim = -1)
        q, k, v = (rearrange(t, 'b n (h d) -> (b h) n d', h = self.heads) for t in qkv)

        # Scaled dot-product similarities; masked positions get a huge
        # negative value so softmax zeroes them out.
        sim = einsum('b i d, b j d -> b i j', q * self.scale, k)
        if exists(mask):
            sim = sim.masked_fill(mask, -torch.finfo(sim.dtype).max)

        attn = softmax(sim, use_triton = use_triton)
        attn = dropout_fn(attn, self.dropout, use_triton = use_triton)

        # Aggregate values, fold heads back into the feature dim, project out.
        out = einsum('b i j, b j d -> b i d', attn, v)
        out = rearrange(out, '(b h) n d -> b n (h d)', h = self.heads)
        return dropout_fn(self.to_out(out), self.dropout, use_triton = use_triton)
class FeedForward(nn.Module):
    """Position-wise feedforward block built around the project's fused
    relu-squared matmul kernel."""
    def __init__(
        self,
        dim,
        mult = 4,
        dropout = 0.,
        use_triton = False
    ):
        super().__init__()
        hidden_dim = dim * mult
        self.use_triton = use_triton
        self.dropout = dropout

        self.norm = nn.LayerNorm(dim)
        # The input projection is kept as raw weight/bias tensors so they can
        # be handed straight to fused_relu_squared.
        self.proj_in_weight = nn.Parameter(torch.randn(dim, hidden_dim))
        self.proj_in_bias = nn.Parameter(torch.randn(hidden_dim))
        self.proj_out = nn.Linear(hidden_dim, dim)

    def forward(self, x, use_triton = None):
        use_triton = default(use_triton, self.use_triton)
        x = layernorm(x, self.norm.weight, self.norm.bias, use_triton = use_triton)
        hidden = fused_relu_squared(x, self.proj_in_weight, self.proj_in_bias, use_triton = use_triton)
        hidden = dropout_fn(hidden, self.dropout, use_triton = use_triton)
        return self.proj_out(hidden)
# main class
class Transformer(nn.Module):
    """Pre-norm transformer language model; layernorm / softmax / dropout /
    cross-entropy primitives can dispatch to Triton kernels (`use_triton`,
    overridable per forward call)."""
    def __init__(
        self,
        *,
        dim,
        num_tokens,
        max_seq_len,
        depth,
        causal = False,
        heads = 8,
        dim_head = 64,
        ff_dropout = 0.,
        attn_dropout = 0.,
        use_triton = False
    ):
        super().__init__()
        self.max_seq_len = max_seq_len
        self.token_emb = nn.Embedding(num_tokens, dim)
        self.pos_emb = nn.Embedding(max_seq_len, dim)
        # depth x (attention, feedforward) pairs, applied with residuals.
        self.layers = nn.ModuleList([])
        for _ in range(depth):
            self.layers.append(nn.ModuleList([
                Attention(dim, heads = heads, dim_head = dim_head, dropout = attn_dropout, use_triton = use_triton),
                FeedForward(dim, dropout = ff_dropout, use_triton = use_triton)
            ]))
        self.norm = nn.LayerNorm(dim)
        self.to_logits = nn.Linear(dim, num_tokens)
        # mask
        self.use_triton = use_triton
        self.causal = causal
        # Precomputed upper-triangular (future positions) mask for the maximum
        # sequence length; None in the bidirectional case. Excluded from the
        # state dict via persistent=False.
        mask = torch.ones(max_seq_len, max_seq_len, dtype = torch.bool).triu(1) if causal else None
        self.register_buffer('mask', mask, persistent = False)
    def forward(
        self,
        x,
        mask = None,
        *,
        labels = None,
        use_triton = None
    ):
        # Returns logits of shape (..., num_tokens), or the cross-entropy loss
        # when `labels` is given.
        use_triton = default(use_triton, self.use_triton)
        n, device = x.shape[1], x.device
        # embed token and add positional embedding
        x = self.token_emb(x)
        pos_emb = self.pos_emb(torch.arange(n, device = device))
        x = x + rearrange(pos_emb, 'n d -> () n d')
        # generate mask, depending on whether autoregressive or not
        # NOTE: in the causal case any user-supplied `mask` is ignored in
        # favor of the precomputed triangular mask. True marks positions that
        # Attention.forward fills with a large negative value.
        if self.causal:
            mask = self.mask[:n, :n]
            mask = rearrange(mask, 'i j -> () i j')
        elif exists(mask):
            # Key-padding mask: outer product of per-token booleans, then
            # inverted so True means "mask out".
            mask = rearrange(mask, 'b i -> b i ()') * rearrange(mask, 'b j -> b () j')
            mask = ~mask
        # go through layers
        for attn, ff in self.layers:
            x = attn(x, mask = mask, use_triton = use_triton) + x
            x = ff(x, use_triton = use_triton) + x
        x = layernorm(x, self.norm.weight, self.norm.bias, use_triton = use_triton)
        logits = self.to_logits(x)
        if not exists(labels):
            return logits
        # Label index 0 is ignored in the loss (padding convention here).
        loss = cross_entropy_fn(logits, labels, ignore_index = 0, use_triton = use_triton)
        return loss
| [
"lucidrains@gmail.com"
] | lucidrains@gmail.com |
ea8e49c1dea6a14302a8664d5771195ac941f80d | a4a01e251b194f6d3c6654a2947a33fec2c03e80 | /PythonWeb/Django/1809/Day04/Day03/Day03/settings.py | c877ec1c29f649d09189c38e7276ea5a8f83b89a | [] | no_license | demo112/1809 | 033019043e2e95ebc637b40eaf11c76bfd089626 | e22972229e5e7831dce2aae0b53ce19a6e3bb106 | refs/heads/master | 2020-04-09T07:10:49.906231 | 2019-02-27T13:08:45 | 2019-02-27T13:08:45 | 160,143,869 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,229 | py | """
Django settings for Day03 project.
Generated by 'django-admin startproject' using Django 1.11.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load it
# from the environment before any non-local deployment.
SECRET_KEY = 'xwcf-oqk0j8oyt(xn9*h!i3#p_rc4y*7m2i@^%m&yh80r%1bmx'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'index',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Day03.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'Day03.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# NOTE(review): plaintext MySQL root credentials in source control -- move
# them to environment variables or a secrets store for anything beyond local
# development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'webdb',
        'USER': 'root',
        'PASSWORD' : '123456',
        'HOST' : 'localhost',
        'PORT' : '3306',
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"huafengdongji@hotmail.com"
] | huafengdongji@hotmail.com |
d2de17bafbf275d1cf8507a86b58db7f9b8add31 | 12f006a0e5d75ef2349d4ae519c1c9cac5309761 | /Solution_907_4.py | 9329286bdef8e1420f5479bdb78e3ee8c2f75bfd | [] | no_license | TimothySjiang/leetcodepy | c613db16282eade713e01b7d641c0f5b341ec84b | ef64e46b8833a684b8b0355ce576b767a0e03596 | refs/heads/master | 2020-07-01T14:48:35.953841 | 2020-01-12T06:19:44 | 2020-01-12T06:19:44 | 201,199,810 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | class Solution:
def sumSubarrayMins(self, A: List[int]) -> int:
res = 0
s = []
A = [0] + A + [0]
for i, x in enumerate(A):
while s and A[s[-1]] > x:
j = s.pop()
k = s[-1]
res += A[j] * (i - j) * (j - k)
s.append(i)
return res % (10**9 + 7) | [
"shjiang@ucdavis.edu"
] | shjiang@ucdavis.edu |
19cb0743d38d61e67b8a296a8ff922ade33a70bf | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/chromite/lib/buildbucket_lib.py | b265dfb3957534eebdfee3eb670a7723a2b8cf7d | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 17,971 | py | # -*- coding: utf-8 -*-
# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code releated to buildbucket.
Buildbucket is a generic build queue. A build requester can schedule
a build and wait for a result. A building system, such as Buildbot,
can lease it, build it and report a result back.
For more documents about buildbucket, please refer to:
'https://chromium.googlesource.com/infra/infra/+/master/appengine/
cr-buildbucket/README.md'
"""
from __future__ import print_function
import collections
import json
import os
import urllib
from chromite.cbuildbot import topology
from chromite.lib.const import waterfall
from chromite.lib import auth
from chromite.lib import config_lib
from chromite.lib import constants
from chromite.lib import cros_logging as logging
from chromite.lib import retry_util
# HTTP methods used against the Buildbucket REST endpoints.
PUT_METHOD = 'PUT'
POST_METHOD = 'POST'
GET_METHOD = 'GET'

# Default limit for SearchAllBuilds.
SEARCH_LIMIT_DEFAULT = 100

# Default limit for max_builds in Buildbucket search requests,
# max_builds cannot be larger than 100 in Buildbucket search requests.
MAX_BUILDS_LIMIT = 100

# Default max_builds number per search request.
MAX_BUILDS_DEFAULT = 10

# Maps a waterfall name to the Buildbucket bucket its builds are put in.
WATERFALL_BUCKET_MAP = {
    waterfall.WATERFALL_INTERNAL:
        constants.CHROMEOS_BUILDBUCKET_BUCKET,
    waterfall.WATERFALL_EXTERNAL:
        constants.CHROMIUMOS_BUILDBUCKET_BUCKET,
    waterfall.WATERFALL_RELEASE:
        constants.CHROMEOS_RELEASE_BUILDBUCKET_BUCKET
}

# A running build on a buildbot should determine the buildbucket
# instance based on the topology information.
# To trigger a tryjob using buildbucket, the buildbucket instance
# should be determined by the test-tryjob option in cbuildbot.

# Buildbucket production host instance.
BUILDBUCKET_HOST = 'cr-buildbucket.appspot.com'

# Buildbucket test host instance.
BUILDBUCKET_TEST_HOST = 'cr-buildbucket-test.appspot.com'

# Namedtuple to store buildbucket-related info about one scheduled build:
# its buildbucket id, how many times it was retried, when it was created,
# and its (lazily filled) status/result/url.
BuildbucketInfo = collections.namedtuple(
    'BuildbucketInfo',
    ['buildbucket_id', 'retry', 'created_ts', 'status', 'result', 'url'])


class BuildbucketResponseException(Exception):
  """Exception raised for an invalid Buildbucket HTTP response."""


class NoBuildbucketBucketFoundException(Exception):
  """Failed to find the corresponding buildbucket bucket."""


class NoBuildbucketClientException(Exception):
  """No Buildbucket client exception."""
def GetServiceAccount(service_account=None):
  """Return the service account path if it points at an existing file.

  Args:
    service_account: Service account file path, or None.

  Returns:
    service_account when the path names an existing file; otherwise None.
  """
  if not service_account:
    return None
  if not os.path.isfile(service_account):
    return None
  logging.info('Get service account %s', service_account)
  return service_account
def GetScheduledBuildDict(scheduled_slave_list):
  """Parse the build information from the scheduled_slave_list metadata.

  Treats all listed builds as newly-scheduled.

  Args:
    scheduled_slave_list: A list of scheduled builds recorded in the
      master metadata, in the format [(build_config, buildbucket_id,
      created_ts)], or None.

  Returns:
    A dict mapping build config name to its buildbucket information
    (in the format of BuildbucketInfo).
  """
  info_by_config = {}
  if scheduled_slave_list is None:
    return info_by_config

  for build_config, buildbucket_id, created_ts in scheduled_slave_list:
    previous = info_by_config.get(build_config)
    if previous is None:
      # First occurrence of this config: retry count starts at 0.
      info_by_config[build_config] = BuildbucketInfo(
          buildbucket_id, 0, created_ts, None, None, None)
    elif created_ts > previous.created_ts:
      # Repeated occurrence: bump the retry count and keep the most
      # recently created scheduling attempt.
      info_by_config[build_config] = BuildbucketInfo(
          buildbucket_id, previous.retry + 1, created_ts, None, None, None)
    else:
      info_by_config[build_config] = BuildbucketInfo(
          previous.buildbucket_id, previous.retry + 1, previous.created_ts,
          None, None, None)

  return info_by_config
def GetBuildInfoDict(metadata, exclude_experimental=True):
  """Build a config -> BuildbucketInfo map from the master metadata.

  Args:
    metadata: Instance of metadata_lib.CBuildbotMetadata.
    exclude_experimental: Whether to drop builds that are important in the
      config but marked experimental in the tree status. Defaults to True.

  Returns:
    A dict mapping build config name to BuildbucketInfo. Configs marked
    experimental through the tree status are omitted.
    (See GetScheduledBuildDict for details.)
  """
  assert metadata is not None

  scheduled = metadata.GetValueWithDefault(
      constants.METADATA_SCHEDULED_IMPORTANT_SLAVES, [])

  if exclude_experimental:
    experimental = metadata.GetValueWithDefault(
        constants.METADATA_EXPERIMENTAL_BUILDERS, [])
    scheduled = [(config, bb_id, ts)
                 for (config, bb_id, ts) in scheduled
                 if config not in experimental]

  return GetScheduledBuildDict(scheduled)
def GetBuildbucketIds(metadata, exclude_experimental=True):
  """Get buildbucket_ids of scheduled slave builds from metadata.

  Args:
    metadata: Instance of metadata_lib.CBuildbotMetadata.
    exclude_experimental: Whether to drop builds that are important in the
      config but marked experimental in the tree status. Defaults to True.

  Returns:
    A list of buildbucket_ids (string) of slave builds.
  """
  return [
      info.buildbucket_id
      for info in GetBuildInfoDict(
          metadata, exclude_experimental=exclude_experimental).values()
  ]
def FetchCurrentSlaveBuilders(config, metadata, builders_array,
                              exclude_experimental=True):
  """Fetch the current important slave builds.

  Args:
    config: Instance of config_lib.BuildConfig. Config dict of this build.
    metadata: Instance of metadata_lib.CBuildbotMetadata. Metadata of this
      build.
    builders_array: A list of slave build configs to check.
    exclude_experimental: Whether to drop builds that are important in the
      config but marked experimental in the tree status. Defaults to True.

  Returns:
    An updated list of slave build configs for a master build which uses
    Buildbucket to schedule slaves; or the original builders_array for
    other masters.
  """
  uses_buildbucket = (
      config is not None and
      metadata is not None and
      config_lib.UseBuildbucketScheduler(config))
  if not uses_buildbucket:
    return builders_array
  return GetBuildInfoDict(
      metadata, exclude_experimental=exclude_experimental).keys()
class BuildbucketClient(object):
"""Buildbucket client to interact with the Buildbucket server."""
def __init__(self, get_access_token, host, **kwargs):
"""Init a BuildbucketClient instance.
Args:
get_access_token: Method to get access token.
host: The buildbucket instance to interact.
kwargs: The kwargs to pass to get_access_token.
"""
self.host = self._GetHost() if host is None else host
self.http = auth.AuthorizedHttp(get_access_token, None, **kwargs)
def _GetHost(self):
"""Get buildbucket Server host from topology."""
return topology.topology.get(topology.BUILDBUCKET_HOST_KEY)
def SendBuildbucketRequest(self, url, method, body, dryrun):
"""Generic buildbucket request.
Args:
url: Buildbucket url to send requests.
method: HTTP method to perform, such as GET, POST, DELETE.
body: The entity body to be sent with the request (a string object).
See httplib2.Http.request for details.
dryrun: Whether a dryrun.
Returns:
A dict of response entity body if the request succeeds; else, None.
See httplib2.Http.request for details.
Raises:
BuildbucketResponseException when response['status'] is invalid.
"""
if dryrun:
logging.info('Dryrun mode is on; Would have made a request '
'with url %s method %s body:\n%s', url, method, body)
return
def try_method():
response, content = self.http.request(
url,
method,
body=body,
headers={'Content-Type': 'application/json'},
)
if int(response['status']) // 100 != 2:
raise BuildbucketResponseException(
'Got a %s response from buildbucket with url: %s\n'
'content: %s' % (response['status'], url, content))
# Deserialize the content into a python dict.
return json.loads(content)
return retry_util.GenericRetry(lambda _: True, 3, try_method)
def PutBuildRequest(self, body, dryrun):
"""Send Put request to buildbucket server.
Args:
body: See body in SendBuildbucketRequest for details.
dryrun: Whether a dryrun.
Returns:
See return type of SendBuildbucketRequest.
"""
url = 'https://%(hostname)s/api/buildbucket/v1/builds' % {
'hostname': self.host
}
return self.SendBuildbucketRequest(url, PUT_METHOD, body, dryrun)
def GetBuildRequest(self, buildbucket_id, dryrun):
"""Send Get request to buildbucket server.
Args:
buildbucket_id: Buildbucket_id (string) of the build to get.
dryrun: Whether a dryrun.
Returns:
See return type of SendBuildbucketRequest.
"""
url = 'https://%(hostname)s/api/buildbucket/v1/builds/%(id)s' % {
'hostname': self.host,
'id': buildbucket_id
}
return self.SendBuildbucketRequest(url, GET_METHOD, None, dryrun)
def CancelBuildRequest(self, buildbucket_id, dryrun):
"""Send Cancel request to buildbucket server.
Args:
buildbucket_id: Buildbucket_id (string) of the build to cancel.
dryrun: Whether a dryrun.
Returns:
See return type of SendBuildbucketRequest.
"""
url = 'https://%(hostname)s/api/buildbucket/v1/builds/%(id)s/cancel' % {
'hostname': self.host,
'id': buildbucket_id
}
return self.SendBuildbucketRequest(url, POST_METHOD, '{}', dryrun)
def CancelBatchBuildsRequest(self, buildbucket_ids, dryrun):
"""Send CancelBatch request to buildbucket server.
Args:
buildbucket_ids: buildbucket_ids (string list) of the builds to cancel.
dryrun: Whether a dryrun.
Returns:
See return type of SendBuildbucketRequest.
"""
url = 'https://%(hostname)s/api/buildbucket/v1/builds/cancel' % {
'hostname': self.host
}
assert isinstance(buildbucket_ids, list)
body = json.dumps({'build_ids': buildbucket_ids})
return self.SendBuildbucketRequest(url, POST_METHOD, body, dryrun)
def RetryBuildRequest(self, buildbucket_id, dryrun):
"""Send a Retry request to the Buildbucket server.
Args:
buildbucket_id: Buildbucket_id (string) of the build to retry.
dryrun: Whether a dryrun.
Returns:
See return type of SendBuildbucketRequest.
"""
url = 'https://%(hostname)s/api/buildbucket/v1/builds/%(id)s/retry' % {
'hostname': self.host,
'id': buildbucket_id
}
return self.SendBuildbucketRequest(url, PUT_METHOD, '{}', dryrun)
def SearchBuildsRequest(self, dryrun, buckets=None, tags=None,
status=None, start_cursor=None,
max_builds=MAX_BUILDS_DEFAULT):
"""Send Search requests to the Buildbucket server.
Args:
dryrun: Whether a dryrun.
buckets: Search for builds in the buckets (string list).
tags: Search for builds containing all the tags (string list).
status: Search for builds in this status (string).
start_cursor: Search for builds with this start cursor (string).
max_builds: Maximum number of builds to return in this request (int).
Returns:
See return type of SendBuildbucketRequest.
"""
params = []
if buckets:
assert isinstance(buckets, list), 'buckets must be a list of string.'
for bucket in buckets:
params.append(('bucket', bucket))
if tags:
assert isinstance(tags, list), 'tags must be a list of string.'
for tag in tags:
params.append(('tag', tag))
if status:
if status not in constants.BUILDBUCKET_BUILDER_STATUSES:
raise ValueError('status must be one of %s' %
str(constants.BUILDBUCKET_BUILDER_STATUSES))
params.append(('status', status))
if start_cursor:
params.append(('start_cursor', start_cursor))
if max_builds is not None:
if max_builds <= 0 or max_builds > MAX_BUILDS_LIMIT:
raise ValueError('max_builds must be number in (0, %s]' %
MAX_BUILDS_LIMIT)
params.append(('max_builds', max_builds))
params_str = urllib.urlencode(params)
url = ('https://%(hostname)s/api/buildbucket/v1/search?%(params_str)s'
% {'hostname': self.host, 'params_str': params_str})
return self.SendBuildbucketRequest(url, GET_METHOD, None, dryrun)
def SearchAllBuilds(self, dryrun, limit=SEARCH_LIMIT_DEFAULT,
buckets=None, tags=None, status=None):
"""Search all qualified builds.
Args:
limit: The limit count of search results.
dryrun: Whether a dryrun.
buckets: Search for builds in the buckets (string list).
tags: Search for builds containing all the tags (string list).
status: Search for builds in this status (string).
Returns:
List of builds.
"""
if limit <= 0:
raise ValueError('limit %s must be greater than 0.')
next_cursor = None
all_builds = []
while True:
current_limit = limit - len(all_builds)
# Do not search for more than MAX_BUILDS_LIMIT builds in one request.
max_builds = (MAX_BUILDS_LIMIT if current_limit > MAX_BUILDS_LIMIT
else current_limit)
content = self.SearchBuildsRequest(
dryrun, buckets=buckets, tags=tags, status=status,
start_cursor=next_cursor, max_builds=max_builds)
builds = GetNestedAttr(content, ['builds'], default=[])
if not builds:
logging.debug('No build found.')
break
all_builds.extend(builds)
if len(all_builds) >= limit:
logging.info('Reached the search limit %s', limit)
break
next_cursor = GetNestedAttr(content, ['next_cursor'])
if next_cursor is None:
logging.debug('No next_cursor in the response.')
break
return all_builds
def GetNestedAttr(content, nested_attr, default=None):
  """Walk a chain of keys into a nested dict, returning default on a miss.

  E.g. for content = {key1: {key2: {key3: value3}}, key4: value4}:
    nested_attr = [key1]             -> {key2: {key3: value3}}
    nested_attr = [key1, key2]       -> {key3: value3}
    nested_attr = [key1, key2, key3] -> value3
    nested_attr = [key5]             -> default

  Args:
    content: A dict of (nested) attributes, or None.
    nested_attr: String list naming the (nested) attribute to fetch.
    default: Value to return when the attribute doesn't exist.

  Returns:
    The corresponding value if the attribute exists; else, default.
  """
  assert isinstance(nested_attr, list), 'nested_attr must be a list.'

  if content is None:
    return default

  assert isinstance(content, dict), 'content must be a dict.'

  current = content
  for key in nested_attr:
    assert isinstance(key, str), 'attribute name must be a string.'
    if not isinstance(current, dict):
      return default
    current = current.get(key, default)

  return current
# Error reason for Cancel requests.
def GetErrorReason(content):
  """Return error.reason from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['error', 'reason'])


def GetErrorMessage(content):
  """Return error.message from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['error', 'message'])


# Failure reason for FAILURE builds.
def GetBuildFailureReason(content):
  """Return build.failure_reason from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'failure_reason'])


# Cancelation reason for CANCELED builds.
def GetBuildCancelationReason(content):
  """Return build.cancelation_reason from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'cancelation_reason'])


def GetBuildURL(content):
  """Return build.url from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'url'])


def GetBuildId(content):
  """Return build.id from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'id'])


def GetBuildStatus(content):
  """Return build.status from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'status'])


def GetBuildResult(content):
  """Return build.result from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'result'])


def GetBuildCreated_ts(content):
  """Return build.created_ts from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'created_ts'])


def GetBuildStartedTS(content):
  """Return build.started_ts from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'started_ts'])


def GetBuildCompletedTS(content):
  """Return build.completed_ts from a Buildbucket response, or None."""
  return GetNestedAttr(content, ['build', 'completed_ts'])


def GetBuildIds(content):
  """Return the ids of all builds in a search response ([] when none)."""
  builds = GetNestedAttr(content, ['builds'], default=[])
  return [b.get('id') for b in builds]


def ExtractBuildIds(builds):
  """Return the ids of the given build dicts; [] when builds is None."""
  return ([b.get('id') for b in builds] if builds is not None else [])
def GetResultMap(content):
  """Build a build_id -> result-entry map from a batch response.

  Entries lacking a (non-None) 'build_id' are skipped; for included
  entries the 'build_id' key is popped out of the entry dict.
  """
  result_map = {}
  for entry in GetNestedAttr(content, ['results'], default=[]):
    if entry.get('build_id') is not None:
      result_map[entry.pop('build_id')] = entry
  return result_map
def GetBuildTags(content, tag):
  """Return the values of all tags named `tag` on the build.

  Buildbucket tags are 'name:value' strings; values (e.g. buildset URLs)
  may themselves contain ':'.

  Args:
    content: Buildbucket response dict (or None).
    tag: Tag name (string) to look up, e.g. 'buildset'.

  Returns:
    List of tag value strings (possibly empty).
  """
  # Fixed: default to [] so a build without tags no longer raises, and
  # split only on the first ':' so values containing colons are not
  # truncated (the old split(':')[1] dropped everything after the second
  # colon).
  tags = GetNestedAttr(content, ['build', 'tags'], default=[])
  result = []
  for t in tags:
    name, _, value = t.partition(':')
    if name == tag:
      result.append(value)
  return result
| [
"arnaud@geometry.ee"
] | arnaud@geometry.ee |
3eadf25d4ee4c8cd157ea0d95e9b6ef32ddf51ea | 0062a19abf334cdb8e7927270106b9ca496ba42e | /boxOffice/admin.py | 88bcf3e8dcb677f3c83572f05a05697efb6f6936 | [] | no_license | Dawinia/movie_backend | 177099d35c5a8f6e89851a3e1e662563d24c242a | 650aa4d7540bc02639a0832069cc540e6c3df3bc | refs/heads/master | 2022-04-22T01:35:54.886991 | 2020-04-23T17:21:30 | 2020-04-23T17:21:30 | 258,153,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 576 | py | from django.contrib import admin
from .models import BoxOffice, MovieInfo
# Register your models here.
class BoxOfficeAdmin(admin.ModelAdmin):
    """Admin layout for BoxOffice records, grouped into titled fieldsets."""

    # Each entry is (section title, {'fields': [...]}); None = untitled section.
    fieldsets = [
        (None, {'fields': ['yearRate', 'crawlDate']}),
        ('movieInfo', {'fields': ['movieId', 'movieName', 'releaseInfo']}),
        ('boxOfficeInfo', {'fields': ['boxRate', 'boxInfo', 'splitBoxInfo', 'sumBoxInfo', 'splitSumBoxInfo']}),
        ('showInfo', {'fields': ['showInfo', 'showView', 'showRate', 'seatRate']})
    ]


# BoxOffice uses the customized admin above; MovieInfo uses the default.
admin.site.register(BoxOffice, BoxOfficeAdmin)
admin.site.register(MovieInfo)
| [
"dawinialo@163.com"
] | dawinialo@163.com |
18a0917a75efd7f2cd47a0e6aaa80d5fc91621a4 | b4945f3392edb5e61be264075fdd93e28f894725 | /autobahn/autobahn/wamp/message.py | 39220ffcafdb826a1098eaadf2db4a4b40495386 | [
"Apache-2.0"
] | permissive | quyuanbo/AutobahnPython | e737c9758a116f95967b114ded84decacc576b45 | 25898545e68ad6673d4a367799efbbd2c29488dc | refs/heads/master | 2021-01-14T12:11:39.251365 | 2014-04-11T00:25:09 | 2014-04-11T00:25:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84,620 | py | ###############################################################################
##
## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
__all__ = ['Hello',
           'Welcome',
           'Abort',
           'Challenge',
           'Authenticate',
           'Goodbye',
           # Fixed: a missing comma after 'Heartbeat' previously fused the
           # two adjacent string literals into the single nonexistent name
           # 'HeartbeatError', breaking wildcard imports of this module.
           'Heartbeat',
           'Error',
           'Publish',
           'Published',
           'Subscribe',
           'Subscribed',
           'Unsubscribe',
           'Unsubscribed',
           'Event',
           'Call',
           'Cancel',
           'Result',
           'Register',
           'Registered',
           'Unregister',
           'Unregistered',
           'Invocation',
           'Interrupt',
           'Yield']
import re
import six
from zope.interface import implementer
import autobahn
from autobahn import util
from autobahn.wamp.exception import ProtocolError
from autobahn.wamp.interfaces import IMessage
from autobahn.wamp.role import ROLE_NAME_TO_CLASS
## strict URI check allowing empty URI components
_URI_PAT_STRICT = re.compile(r"^(([0-9a-z_]{2,}\.)|\.)*([0-9a-z_]{2,})?$")

## loose URI check allowing empty URI components
_URI_PAT_LOOSE = re.compile(r"^(([^\s\.#]+\.)|\.)*([^\s\.#]+)?$")

## strict URI check disallowing empty URI components
_URI_PAT_STRICT_NON_EMPTY = re.compile(r"^([0-9a-z_]{2,}\.)*([0-9a-z_]{2,})?$")

## loose URI check disallowing empty URI components
## NOTE(review): only _URI_PAT_LOOSE is referenced in this part of the
## module (by check_or_raise_uri); the other variants appear to exist for
## stricter validation profiles -- confirm against the rest of the file.
_URI_PAT_LOOSE_NON_EMPTY = re.compile(r"^([^\s\.#]+\.)*([^\s\.#]+)?$")
def check_or_raise_uri(value, message):
    """
    Validate `value` as a WAMP URI (loose check, empty components allowed).

    :param value: The URI to check; must be a unicode string.
    :param message: Context prefix used in the error message.
    :returns: The validated value, unchanged.
    :raises ProtocolError: If the value has the wrong type or does not
       match the loose URI pattern.
    """
    if type(value) != six.text_type:
        raise ProtocolError("{}: invalid type {} for URI".format(message, type(value)))
    if not _URI_PAT_LOOSE.match(value):
        raise ProtocolError("{}: invalid value '{}' for URI".format(message, value))
    return value


def check_or_raise_id(value, message):
    """
    Validate `value` as a WAMP ID: an integer in [0, 2**53].

    :param value: The ID to check.
    :param message: Context prefix used in the error message.
    :returns: The validated value, unchanged.
    :raises ProtocolError: If the value has the wrong type or is out of range.
    """
    if type(value) not in six.integer_types:
        raise ProtocolError("{}: invalid type {} for ID".format(message, type(value)))
    if value < 0 or value > 9007199254740992: # 2**53
        raise ProtocolError("{}: invalid value {} for ID".format(message, value))
    return value


def check_or_raise_extra(value, message):
    """
    Validate `value` as a WAMP "extra"/details dictionary: a dict whose
    keys are all unicode strings.

    :param value: The dict to check.
    :param message: Context prefix used in the error message.
    :returns: The validated value, unchanged.
    :raises ProtocolError: If the value is not a dict or has a non-string key.
    """
    if type(value) != dict:
        raise ProtocolError("{}: invalid type {}".format(message, type(value)))
    for k in value.keys():
        if type(k) != six.text_type:
            raise ProtocolError("{}: invalid type {} for key '{}'".format(message, type(k), k))
    return value
class Message(util.EqualityMixin):
    """
    WAMP message base class. Not meant to be instantiated directly;
    concrete message types subclass it and implement `marshal`.
    """

    def __init__(self):
        """
        Base constructor.
        """
        # Per-serializer cache mapping ISerializer instance -> wire bytes.
        self._serialized = {}

    def uncache(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.uncache`
        """
        self._serialized = {}

    def serialize(self, serializer):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.serialize`
        """
        try:
            # Fast path: this serializer already produced bytes for us.
            return self._serialized[serializer]
        except KeyError:
            data = serializer.serialize(self.marshal())
            self._serialized[serializer] = data
            return data
@implementer(IMessage)
class Hello(Message):
    """
    A WAMP `HELLO` message, sent by a client to open a session on a realm.

    Format: `[HELLO, Realm|uri, Details|dict]`
    """

    MESSAGE_TYPE = 1
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, realm, roles, authmethods = None):
        """
        Message constructor.

        :param realm: The URI of the WAMP realm to join.
        :type realm: str
        :param roles: The WAMP roles to announce.
        :type roles: list of :class:`autobahn.wamp.role.RoleFeatures`
        :param authmethods: Optional list of authentication method names
           (unicode strings) the client is willing to use.
        :type authmethods: list or None
        """
        assert(type(realm) == six.text_type)
        assert(type(roles) == list)
        for role in roles:
            assert(isinstance(role, autobahn.wamp.role.RoleFeatures))
        if authmethods:
            assert(type(authmethods) == list)
            for authmethod in authmethods:
                assert(type(authmethod) == six.text_type)
        Message.__init__(self)
        self.realm = realm
        self.roles = roles
        self.authmethods = authmethods

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: On any structural or type violation.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Hello.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for HELLO".format(len(wmsg)))

        realm = check_or_raise_uri(wmsg[1], "'realm' in HELLO")
        details = check_or_raise_extra(wmsg[2], "'details' in HELLO")

        roles = []

        # 'roles' is mandatory and must name at least one known WAMP role.
        if not u'roles' in details:
            raise ProtocolError("missing mandatory roles attribute in options in HELLO")

        details_roles = check_or_raise_extra(details[u'roles'], "'roles' in 'details' in HELLO")

        if len(details_roles) == 0:
            raise ProtocolError("empty 'roles' in 'details' in HELLO")

        for role in details_roles:
            if role not in ROLE_NAME_TO_CLASS:
                raise ProtocolError("invalid role '{}' in 'roles' in 'details' in HELLO".format(role))

            details_role = check_or_raise_extra(details_roles[role], "role '{}' in 'roles' in 'details' in HELLO".format(role))

            if u'features' in details_role:
                details_role_features = check_or_raise_extra(details_role[u'features'], "'features' in role '{}' in 'roles' in 'details' in HELLO".format(role))

                ## FIXME: skip unknown attributes
                role_features = ROLE_NAME_TO_CLASS[role](**details_role[u'features'])
            else:
                # No feature announcement: instantiate the role with defaults.
                role_features = ROLE_NAME_TO_CLASS[role]()

            roles.append(role_features)

        authmethods = None
        if u'authmethods' in details:
            details_authmethods = details[u'authmethods']
            if type(details_authmethods) != list:
                raise ProtocolError("invalid type {} for 'authmethods' detail in HELLO".format(type(details_authmethods)))

            for auth_method in details_authmethods:
                if type(auth_method) != six.text_type:
                    raise ProtocolError("invalid type {} for item in 'authmethods' detail in HELLO".format(type(auth_method)))

            authmethods = details_authmethods

        obj = Hello(realm, roles, authmethods)

        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {u'roles': {}}
        for role in self.roles:
            details[u'roles'][role.ROLE] = {}
            # Announce every non-None, non-private feature flag of the role.
            for feature in role.__dict__:
                if not feature.startswith('_') and feature != 'ROLE' and getattr(role, feature) is not None:
                    if not u'features' in details[u'roles'][role.ROLE]:
                        details[u'roles'][role.ROLE] = {u'features': {}}
                    details[u'roles'][role.ROLE][u'features'][six.u(feature)] = getattr(role, feature)

        if self.authmethods:
            details[u'authmethods'] = self.authmethods

        return [Hello.MESSAGE_TYPE, self.realm, details]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP HELLO Message (realm = {}, roles = {}, authmethods = {})".format(self.realm, self.roles, self.authmethods)
@implementer(IMessage)
class Welcome(Message):
    """
    A WAMP `WELCOME` message, sent by a router to accept a client's session.

    Format: `[WELCOME, Session|id, Details|dict]`
    """

    MESSAGE_TYPE = 2
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, session, roles, authid = None, authrole = None, authmethod = None):
        """
        Message constructor.

        :param session: The WAMP session ID the other peer is assigned.
        :type session: int
        :param roles: The WAMP roles the router announces.
        :type roles: list of :class:`autobahn.wamp.role.RoleFeatures`
        :param authid: Optional authentication ID assigned to the session.
        :type authid: str or None
        :param authrole: Optional authentication role assigned to the session.
        :type authrole: str or None
        :param authmethod: Optional authentication method that was used.
        :type authmethod: str or None
        """
        assert(type(session) in six.integer_types)
        assert(type(roles) == list)
        for role in roles:
            assert(isinstance(role, autobahn.wamp.role.RoleFeatures))
        assert(authid is None or type(authid) == six.text_type)
        assert(authrole is None or type(authrole) == six.text_type)
        assert(authmethod is None or type(authmethod) == six.text_type)
        Message.__init__(self)
        self.session = session
        self.roles = roles
        self.authid = authid
        self.authrole = authrole
        self.authmethod = authmethod

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: On any structural or type violation.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Welcome.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for WELCOME".format(len(wmsg)))

        session = check_or_raise_id(wmsg[1], "'session' in WELCOME")
        details = check_or_raise_extra(wmsg[2], "'details' in WELCOME")

        authid = details.get(u'authid', None)
        authrole = details.get(u'authrole', None)
        authmethod = details.get(u'authmethod', None)

        roles = []

        # 'roles' is mandatory and must name at least one known WAMP role.
        if not u'roles' in details:
            raise ProtocolError("missing mandatory roles attribute in options in WELCOME")

        details_roles = check_or_raise_extra(details['roles'], "'roles' in 'details' in WELCOME")

        if len(details_roles) == 0:
            raise ProtocolError("empty 'roles' in 'details' in WELCOME")

        for role in details_roles:
            if role not in ROLE_NAME_TO_CLASS:
                raise ProtocolError("invalid role '{}' in 'roles' in 'details' in WELCOME".format(role))

            if u'features' in details_roles[role]:
                details_role_features = check_or_raise_extra(details_roles[role][u'features'], "'features' in role '{}' in 'roles' in 'details' in WELCOME".format(role))

                ## FIXME: skip unknown attributes
                role_features = ROLE_NAME_TO_CLASS[role](**details_roles[role][u'features'])
            else:
                role_features = ROLE_NAME_TO_CLASS[role]()

            roles.append(role_features)

        obj = Welcome(session, roles, authid, authrole, authmethod)

        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {
            u'roles': {}
        }
        if self.authid:
            details[u'authid'] = self.authid
        if self.authrole:
            details[u'authrole'] = self.authrole
        # Fixed: this previously re-tested `self.authrole` (copy-paste bug),
        # so 'authmethod' was emitted/omitted based on the wrong attribute.
        if self.authmethod:
            details[u'authmethod'] = self.authmethod

        for role in self.roles:
            details[u'roles'][role.ROLE] = {}
            # Announce every non-None, non-private feature flag of the role.
            for feature in role.__dict__:
                if not feature.startswith('_') and feature != 'ROLE' and getattr(role, feature) is not None:
                    if not u'features' in details[u'roles'][role.ROLE]:
                        details[u'roles'][role.ROLE] = {u'features': {}}
                    details[u'roles'][role.ROLE][u'features'][six.u(feature)] = getattr(role, feature)

        return [Welcome.MESSAGE_TYPE, self.session, details]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP WELCOME Message (session = {}, roles = {}, authid = {}, authrole = {}, authmethod = {})".format(self.session, self.roles, self.authid, self.authrole, self.authmethod)
@implementer(IMessage)
class Abort(Message):
    """
    A WAMP `ABORT` message, sent to refuse a session before it is established.

    Format: `[ABORT, Details|dict, Reason|uri]`
    """

    MESSAGE_TYPE = 3
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, reason, message = None):
        """
        Message constructor.

        :param reason: WAMP or application error URI for aborting reason.
        :type reason: str
        :param message: Optional human-readable closing message, e.g. for logging purposes.
        :type message: str
        """
        assert(type(reason) == six.text_type)
        assert(message is None or type(message) == six.text_type)
        Message.__init__(self)
        self.reason = reason
        self.message = message

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: On any structural or type violation.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Abort.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for ABORT".format(len(wmsg)))

        details = check_or_raise_extra(wmsg[1], "'details' in ABORT")
        reason = check_or_raise_uri(wmsg[2], "'reason' in ABORT")

        # 'message' detail is optional, but when present must be a string.
        msg = None
        if u'message' in details:
            msg = details[u'message']
            if type(msg) != six.text_type:
                raise ProtocolError("invalid type {} for 'message' detail in ABORT".format(type(msg)))

        return Abort(reason, msg)

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {u'message': self.message} if self.message else {}
        return [Abort.MESSAGE_TYPE, details, self.reason]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP ABORT Message (message = {}, reason = {})".format(self.message, self.reason)
@implementer(IMessage)
class Challenge(Message):
    """
    A WAMP `CHALLENGE` message, sent by a router to request authentication.

    Format: `[CHALLENGE, Method|string, Extra|dict]`
    """

    MESSAGE_TYPE = 4
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, method, extra = None):
        """
        Message constructor.

        :param method: The authentication method.
        :type method: str
        :param extra: Authentication method specific information (defaults
           to an empty dict).
        :type extra: dict or None
        """
        assert(type(method) == six.text_type)
        assert(extra is None or type(extra) == dict)
        Message.__init__(self)
        self.method = method
        # Fixed: the default used to be a shared mutable `extra = {}`
        # argument; a fresh dict is now created per instance.
        self.extra = extra or {}

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: On any structural or type violation.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Challenge.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for CHALLENGE".format(len(wmsg)))

        method = wmsg[1]
        # Fixed: was `type(method) != str`, which rejected unicode strings
        # on Python 2; six.text_type matches the checks used elsewhere in
        # this module.
        if type(method) != six.text_type:
            raise ProtocolError("invalid type {} for 'method' in CHALLENGE".format(type(method)))

        extra = check_or_raise_extra(wmsg[2], "'extra' in CHALLENGE")

        obj = Challenge(method, extra)

        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Challenge.MESSAGE_TYPE, self.method, self.extra]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP CHALLENGE Message (method = {}, extra = {})".format(self.method, self.extra)
@implementer(IMessage)
class Authenticate(Message):
    """
    A WAMP `AUTHENTICATE` message.

    Format: `[AUTHENTICATE, Signature|string, Extra|dict]`
    """

    MESSAGE_TYPE = 5
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, signature, extra = None):
        """
        Message constructor.

        :param signature: The signature for the authentication challenge.
        :type signature: str
        :param extra: Authentication method specific extra information.
        :type extra: dict
        """
        assert(type(signature) == six.text_type)
        assert(extra is None or type(extra) == dict)

        Message.__init__(self)
        self.signature = signature
        ## default to an empty dict so marshal() always emits a valid Extra|dict
        self.extra = extra if extra is not None else {}

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Authenticate.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for AUTHENTICATE".format(len(wmsg)))

        signature = wmsg[1]
        if type(signature) != six.text_type:
            raise ProtocolError("invalid type {} for 'signature' in AUTHENTICATE".format(type(signature)))

        extra = check_or_raise_extra(wmsg[2], "'extra' in AUTHENTICATE")

        ## previously the parsed `extra` was validated and then dropped;
        ## carry it through so the payload survives a parse/marshal round-trip
        obj = Authenticate(signature, extra = extra)

        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Authenticate.MESSAGE_TYPE, self.signature, self.extra]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP AUTHENTICATE Message (signature = {})".format(self.signature)
@implementer(IMessage)
class Goodbye(Message):
    """
    A WAMP `GOODBYE` message, used for orderly session shutdown.

    Format: `[GOODBYE, Details|dict, Reason|uri]`
    """

    MESSAGE_TYPE = 6
    """
    The WAMP message code for this type of message.
    """

    DEFAULT_REASON = u"wamp.goodbye.normal"
    """
    Default WAMP closing reason.
    """

    def __init__(self, reason = DEFAULT_REASON, message = None):
        """
        Message constructor.

        :param reason: Optional WAMP or application error URI for closing reason.
        :type reason: str
        :param message: Optional human-readable closing message, e.g. for logging purposes.
        :type message: str
        """
        assert(type(reason) == six.text_type)
        assert(message is None or type(message) == six.text_type)

        Message.__init__(self)
        self.reason = reason
        self.message = message

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Goodbye.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for GOODBYE".format(len(wmsg)))

        details = check_or_raise_extra(wmsg[1], "'details' in GOODBYE")
        reason = check_or_raise_uri(wmsg[2], "'reason' in GOODBYE")

        ## the human-readable message is an optional detail
        message = None
        if u'message' in details:
            message = details[u'message']
            if type(message) != six.text_type:
                raise ProtocolError("invalid type {} for 'message' detail in GOODBYE".format(type(message)))

        return Goodbye(reason, message)

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {u'message': self.message} if self.message else {}
        return [Goodbye.MESSAGE_TYPE, details, self.reason]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        template = "WAMP GOODBYE Message (message = {}, reason = {})"
        return template.format(self.message, self.reason)
@implementer(IMessage)
class Heartbeat(Message):
    """
    A WAMP `HEARTBEAT` message.

    Formats:
      * `[HEARTBEAT, Incoming|integer, Outgoing|integer]`
      * `[HEARTBEAT, Incoming|integer, Outgoing|integer, Discard|string]`
    """

    MESSAGE_TYPE = 7
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, incoming, outgoing, discard = None):
        """
        Message constructor.

        :param incoming: Last incoming heartbeat processed from peer.
        :type incoming: int
        :param outgoing: Outgoing heartbeat.
        :type outgoing: int
        :param discard: Optional data that is discared by peer.
        :type discard: str
        """
        assert(type(incoming) in six.integer_types)
        assert(type(outgoing) in six.integer_types)
        assert(discard is None or type(discard) == six.text_type)

        Message.__init__(self)
        self.incoming = incoming
        self.outgoing = outgoing
        self.discard = discard

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Heartbeat.MESSAGE_TYPE)

        if len(wmsg) not in [3, 4]:
            raise ProtocolError("invalid message length {} for HEARTBEAT".format(len(wmsg)))

        incoming_seq = wmsg[1]
        if type(incoming_seq) not in six.integer_types:
            raise ProtocolError("invalid type {} for 'incoming' in HEARTBEAT".format(type(incoming_seq)))
        if incoming_seq < 0:  # must be non-negative
            raise ProtocolError("invalid value {} for 'incoming' in HEARTBEAT".format(incoming_seq))

        outgoing_seq = wmsg[2]
        if type(outgoing_seq) not in six.integer_types:
            raise ProtocolError("invalid type {} for 'outgoing' in HEARTBEAT".format(type(outgoing_seq)))
        if outgoing_seq <= 0:  # must be positive
            raise ProtocolError("invalid value {} for 'outgoing' in HEARTBEAT".format(outgoing_seq))

        ## optional trailing discard payload
        payload = None
        if len(wmsg) > 3:
            payload = wmsg[3]
            if type(payload) != six.text_type:
                raise ProtocolError("invalid type {} for 'discard' in HEARTBEAT".format(type(payload)))

        return Heartbeat(incoming_seq, outgoing_seq, discard = payload)

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        msg = [Heartbeat.MESSAGE_TYPE, self.incoming, self.outgoing]
        if self.discard:
            msg.append(self.discard)
        return msg

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        discard_len = len(self.discard) if self.discard else None
        template = "WAMP HEARTBEAT Message (incoming {}, outgoing = {}, len(discard) = {})"
        return template.format(self.incoming, self.outgoing, discard_len)
@implementer(IMessage)
class Error(Message):
    """
    A WAMP `ERROR` message, sent in response to a failed request.

    Formats:
      * `[ERROR, REQUEST.Type|int, REQUEST.Request|id, Details|dict, Error|uri]`
      * `[ERROR, REQUEST.Type|int, REQUEST.Request|id, Details|dict, Error|uri, Arguments|list]`
      * `[ERROR, REQUEST.Type|int, REQUEST.Request|id, Details|dict, Error|uri, Arguments|list, ArgumentsKw|dict]`
    """

    MESSAGE_TYPE = 8
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request_type, request, error, args = None, kwargs = None):
        """
        Message constructor.

        :param request_type: The WAMP message type code for the original request.
        :type request_type: int
        :param request: The WAMP request ID of the original request (`Call`, `Subscribe`, ...) this error occured for.
        :type request: int
        :param error: The WAMP or application error URI for the error that occured.
        :type error: str
        :param args: Positional values for application-defined exception.
           Must be serializable using any serializers in use.
        :type args: list
        :param kwargs: Keyword values for application-defined exception.
           Must be serializable using any serializers in use.
        :type kwargs: dict
        """
        assert(type(request_type) in six.integer_types)
        assert(type(request) in six.integer_types)
        assert(type(error) == six.text_type)
        assert(args is None or type(args) in [list, tuple])
        assert(kwargs is None or type(kwargs) == dict)
        Message.__init__(self)
        self.request_type = request_type
        self.request = request
        self.error = error
        self.args = args
        self.kwargs = kwargs

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is malformed.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Error.MESSAGE_TYPE)
        ## 5 mandatory elements, plus optional Arguments and ArgumentsKw
        if len(wmsg) not in (5, 6, 7):
            raise ProtocolError("invalid message length {} for ERROR".format(len(wmsg)))
        request_type = wmsg[1]
        if type(request_type) not in six.integer_types:
            raise ProtocolError("invalid type {} for 'request_type' in ERROR".format(request_type))
        ## an ERROR can only refer to one of the request-style message types
        if request_type not in [Subscribe.MESSAGE_TYPE,
                                Unsubscribe.MESSAGE_TYPE,
                                Publish.MESSAGE_TYPE,
                                Register.MESSAGE_TYPE,
                                Unregister.MESSAGE_TYPE,
                                Call.MESSAGE_TYPE,
                                Invocation.MESSAGE_TYPE]:
            raise ProtocolError("invalid value {} for 'request_type' in ERROR".format(request_type))
        request = check_or_raise_id(wmsg[2], "'request' in ERROR")
        details = check_or_raise_extra(wmsg[3], "'details' in ERROR")
        error = check_or_raise_uri(wmsg[4], "'error' in ERROR")
        ## optional positional payload
        args = None
        if len(wmsg) > 5:
            args = wmsg[5]
            if type(args) != list:
                raise ProtocolError("invalid type {} for 'args' in ERROR".format(type(args)))
        ## optional keyword payload (only valid if args is also present)
        kwargs = None
        if len(wmsg) > 6:
            kwargs = wmsg[6]
            if type(kwargs) != dict:
                raise ProtocolError("invalid type {} for 'kwargs' in ERROR".format(type(kwargs)))
        obj = Error(request_type, request, error, args = args, kwargs = kwargs)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {}
        ## kwargs require args on the wire, so the shorter forms are only
        ## emitted when the later optional elements are absent
        if self.kwargs:
            return [self.MESSAGE_TYPE, self.request_type, self.request, details, self.error, self.args, self.kwargs]
        elif self.args:
            return [self.MESSAGE_TYPE, self.request_type, self.request, details, self.error, self.args]
        else:
            return [self.MESSAGE_TYPE, self.request_type, self.request, details, self.error]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP Error Message (request_type = {}, request = {}, error = {}, args = {}, kwargs = {})".format(self.request_type, self.request, self.error, self.args, self.kwargs)
@implementer(IMessage)
class Publish(Message):
    """
    A WAMP `PUBLISH` message, sent by a publisher to publish an event to a topic.

    Formats:
      * `[PUBLISH, Request|id, Options|dict, Topic|uri]`
      * `[PUBLISH, Request|id, Options|dict, Topic|uri, Arguments|list]`
      * `[PUBLISH, Request|id, Options|dict, Topic|uri, Arguments|list, ArgumentsKw|dict]`
    """

    MESSAGE_TYPE = 16
    """
    The WAMP message code for this type of message.
    """

    def __init__(self,
                 request,
                 topic,
                 args = None,
                 kwargs = None,
                 acknowledge = None,
                 excludeMe = None,
                 exclude = None,
                 eligible = None,
                 discloseMe = None):
        """
        Message constructor.

        :param request: The WAMP request ID of this request.
        :type request: int
        :param topic: The WAMP or application URI of the PubSub topic the event should
           be published to.
        :type topic: str
        :param args: Positional values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type args: list
        :param kwargs: Keyword values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type kwargs: dict
        :param acknowledge: If True, acknowledge the publication with a success or
           error response.
        :type acknowledge: bool
        :param excludeMe: If True, exclude the publisher from receiving the event, even
           if he is subscribed (and eligible).
        :type excludeMe: bool
        :param exclude: List of WAMP session IDs to exclude from receiving this event.
        :type exclude: list
        :param eligible: List of WAMP session IDs eligible to receive this event.
        :type eligible: list
        :param discloseMe: If True, request to disclose the publisher of this event
           to subscribers.
        :type discloseMe: bool
        """
        assert(type(request) in six.integer_types)
        assert(type(topic) == six.text_type)
        assert(args is None or type(args) in [list, tuple])
        assert(kwargs is None or type(kwargs) == dict)
        assert(acknowledge is None or type(acknowledge) == bool)
        assert(excludeMe is None or type(excludeMe) == bool)
        assert(exclude is None or type(exclude) == list)
        assert(eligible is None or type(eligible) == list)
        assert(discloseMe is None or type(discloseMe) == bool)
        Message.__init__(self)
        self.request = request
        self.topic = topic
        self.args = args
        self.kwargs = kwargs
        self.acknowledge = acknowledge
        self.excludeMe = excludeMe
        self.exclude = exclude
        self.eligible = eligible
        self.discloseMe = discloseMe

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is malformed.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Publish.MESSAGE_TYPE)
        ## 4 mandatory elements, plus optional Arguments and ArgumentsKw
        if len(wmsg) not in (4, 5, 6):
            raise ProtocolError("invalid message length {} for PUBLISH".format(len(wmsg)))
        request = check_or_raise_id(wmsg[1], "'request' in PUBLISH")
        options = check_or_raise_extra(wmsg[2], "'options' in PUBLISH")
        topic = check_or_raise_uri(wmsg[3], "'topic' in PUBLISH")
        args = None
        if len(wmsg) > 4:
            args = wmsg[4]
            if type(args) != list:
                raise ProtocolError("invalid type {} for 'args' in PUBLISH".format(type(args)))
        kwargs = None
        if len(wmsg) > 5:
            kwargs = wmsg[5]
            if type(kwargs) != dict:
                raise ProtocolError("invalid type {} for 'kwargs' in PUBLISH".format(type(kwargs)))
        ## all options are optional; unset options stay None so that marshal()
        ## can omit them again
        acknowledge = None
        excludeMe = None
        exclude = None
        eligible = None
        discloseMe = None
        if u'acknowledge' in options:
            option_acknowledge = options[u'acknowledge']
            if type(option_acknowledge) != bool:
                raise ProtocolError("invalid type {} for 'acknowledge' option in PUBLISH".format(type(option_acknowledge)))
            acknowledge = option_acknowledge
        if u'exclude_me' in options:
            option_excludeMe = options[u'exclude_me']
            if type(option_excludeMe) != bool:
                raise ProtocolError("invalid type {} for 'exclude_me' option in PUBLISH".format(type(option_excludeMe)))
            excludeMe = option_excludeMe
        if u'exclude' in options:
            option_exclude = options[u'exclude']
            if type(option_exclude) != list:
                raise ProtocolError("invalid type {} for 'exclude' option in PUBLISH".format(type(option_exclude)))
            ## every entry must be a WAMP session ID (integer)
            for sessionId in option_exclude:
                if type(sessionId) not in six.integer_types:
                    raise ProtocolError("invalid type {} for value in 'exclude' option in PUBLISH".format(type(sessionId)))
            exclude = option_exclude
        if u'eligible' in options:
            option_eligible = options[u'eligible']
            if type(option_eligible) != list:
                raise ProtocolError("invalid type {} for 'eligible' option in PUBLISH".format(type(option_eligible)))
            ## every entry must be a WAMP session ID (integer)
            for sessionId in option_eligible:
                if type(sessionId) not in six.integer_types:
                    raise ProtocolError("invalid type {} for value in 'eligible' option in PUBLISH".format(type(sessionId)))
            eligible = option_eligible
        if u'disclose_me' in options:
            option_discloseMe = options[u'disclose_me']
            if type(option_discloseMe) != bool:
                raise ProtocolError("invalid type {} for 'disclose_me' option in PUBLISH".format(type(option_discloseMe)))
            discloseMe = option_discloseMe
        obj = Publish(request,
                      topic,
                      args = args,
                      kwargs = kwargs,
                      acknowledge = acknowledge,
                      excludeMe = excludeMe,
                      exclude = exclude,
                      eligible = eligible,
                      discloseMe = discloseMe)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        ## only explicitly-set options are serialized
        options = {}
        if self.acknowledge is not None:
            options[u'acknowledge'] = self.acknowledge
        if self.excludeMe is not None:
            options[u'exclude_me'] = self.excludeMe
        if self.exclude is not None:
            options[u'exclude'] = self.exclude
        if self.eligible is not None:
            options[u'eligible'] = self.eligible
        if self.discloseMe is not None:
            options[u'disclose_me'] = self.discloseMe
        ## kwargs require args on the wire
        if self.kwargs:
            return [Publish.MESSAGE_TYPE, self.request, options, self.topic, self.args, self.kwargs]
        elif self.args:
            return [Publish.MESSAGE_TYPE, self.request, options, self.topic, self.args]
        else:
            return [Publish.MESSAGE_TYPE, self.request, options, self.topic]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP PUBLISH Message (request = {}, topic = {}, args = {}, kwargs = {}, acknowledge = {}, excludeMe = {}, exclude = {}, eligible = {}, discloseMe = {})".format(self.request, self.topic, self.args, self.kwargs, self.acknowledge, self.excludeMe, self.exclude, self.eligible, self.discloseMe)
@implementer(IMessage)
class Published(Message):
    """
    A WAMP `PUBLISHED` message, acknowledging a `PUBLISH` request.

    Format: `[PUBLISHED, PUBLISH.Request|id, Publication|id]`
    """

    MESSAGE_TYPE = 17
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, publication):
        """
        Message constructor.

        :param request: The request ID of the original `PUBLISH` request.
        :type request: int
        :param publication: The publication ID for the published event.
        :type publication: int
        """
        assert(type(request) in six.integer_types)
        assert(type(publication) in six.integer_types)

        Message.__init__(self)
        self.request = request
        self.publication = publication

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Published.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for PUBLISHED".format(len(wmsg)))

        return Published(check_or_raise_id(wmsg[1], "'request' in PUBLISHED"),
                         check_or_raise_id(wmsg[2], "'publication' in PUBLISHED"))

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Published.MESSAGE_TYPE, self.request, self.publication]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        template = "WAMP PUBLISHED Message (request = {}, publication = {})"
        return template.format(self.request, self.publication)
@implementer(IMessage)
class Subscribe(Message):
    """
    A WAMP `SUBSCRIBE` message, requesting a subscription to a topic.

    Format: `[SUBSCRIBE, Request|id, Options|dict, Topic|uri]`
    """

    MESSAGE_TYPE = 32
    """
    The WAMP message code for this type of message.
    """

    ## supported topic matching policies
    MATCH_EXACT = u'exact'
    MATCH_PREFIX = u'prefix'
    MATCH_WILDCARD = u'wildcard'

    def __init__(self, request, topic, match = MATCH_EXACT):
        """
        Message constructor.

        :param request: The WAMP request ID of this request.
        :type request: int
        :param topic: The WAMP or application URI of the PubSub topic to subscribe to.
        :type topic: str
        :param match: The topic matching method to be used for the subscription.
        :type match: str
        """
        assert(type(request) in six.integer_types)
        assert(type(topic) == six.text_type)
        assert(match is None or type(match) == six.text_type)
        assert(match is None or match in [self.MATCH_EXACT, self.MATCH_PREFIX, self.MATCH_WILDCARD])

        Message.__init__(self)
        self.request = request
        self.topic = topic
        self.match = match

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Subscribe.MESSAGE_TYPE)

        if len(wmsg) != 4:
            raise ProtocolError("invalid message length {} for SUBSCRIBE".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in SUBSCRIBE")
        options = check_or_raise_extra(wmsg[2], "'options' in SUBSCRIBE")
        topic = check_or_raise_uri(wmsg[3], "'topic' in SUBSCRIBE")

        ## matching policy defaults to exact when not given in options
        match = Subscribe.MATCH_EXACT
        if u'match' in options:
            match_value = options[u'match']
            if type(match_value) != six.text_type:
                raise ProtocolError("invalid type {} for 'match' option in SUBSCRIBE".format(type(match_value)))
            if match_value not in [Subscribe.MATCH_EXACT, Subscribe.MATCH_PREFIX, Subscribe.MATCH_WILDCARD]:
                raise ProtocolError("invalid value {} for 'match' option in SUBSCRIBE".format(match_value))
            match = match_value

        return Subscribe(request, topic, match)

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        ## the default (exact) matching policy is not serialized
        non_default_match = self.match and self.match != Subscribe.MATCH_EXACT
        options = {u'match': self.match} if non_default_match else {}
        return [Subscribe.MESSAGE_TYPE, self.request, options, self.topic]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        template = "WAMP SUBSCRIBE Message (request = {}, topic = {}, match = {})"
        return template.format(self.request, self.topic, self.match)
@implementer(IMessage)
class Subscribed(Message):
    """
    A WAMP `SUBSCRIBED` message, acknowledging a `SUBSCRIBE` request.

    Format: `[SUBSCRIBED, SUBSCRIBE.Request|id, Subscription|id]`
    """

    MESSAGE_TYPE = 33
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, subscription):
        """
        Message constructor.

        :param request: The request ID of the original `SUBSCRIBE` request.
        :type request: int
        :param subscription: The subscription ID for the subscribed topic (or topic pattern).
        :type subscription: int
        """
        assert(type(request) in six.integer_types)
        assert(type(subscription) in six.integer_types)

        Message.__init__(self)
        self.request = request
        self.subscription = subscription

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Subscribed.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for SUBSCRIBED".format(len(wmsg)))

        return Subscribed(check_or_raise_id(wmsg[1], "'request' in SUBSCRIBED"),
                          check_or_raise_id(wmsg[2], "'subscription' in SUBSCRIBED"))

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Subscribed.MESSAGE_TYPE, self.request, self.subscription]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        template = "WAMP SUBSCRIBED Message (request = {}, subscription = {})"
        return template.format(self.request, self.subscription)
@implementer(IMessage)
class Unsubscribe(Message):
    """
    A WAMP `UNSUBSCRIBE` message.

    Format: `[UNSUBSCRIBE, Request|id, SUBSCRIBED.Subscription|id]`
    """

    MESSAGE_TYPE = 34
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, subscription):
        """
        Message constructor.

        :param request: The WAMP request ID of this request.
        :type request: int
        :param subscription: The subscription ID for the subscription to unsubscribe from.
        :type subscription: int
        """
        assert(type(request) in six.integer_types)
        assert(type(subscription) in six.integer_types)

        Message.__init__(self)
        self.request = request
        self.subscription = subscription

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is malformed.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Unsubscribe.MESSAGE_TYPE)

        if len(wmsg) != 3:
            ## message text made consistent with all sibling classes
            ## (was "... for WAMP UNSUBSCRIBE")
            raise ProtocolError("invalid message length {} for UNSUBSCRIBE".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in UNSUBSCRIBE")
        subscription = check_or_raise_id(wmsg[2], "'subscription' in UNSUBSCRIBE")

        obj = Unsubscribe(request, subscription)

        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Unsubscribe.MESSAGE_TYPE, self.request, self.subscription]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP UNSUBSCRIBE Message (request = {}, subscription = {})".format(self.request, self.subscription)
@implementer(IMessage)
class Unsubscribed(Message):
    """
    A WAMP `UNSUBSCRIBED` message, acknowledging an `UNSUBSCRIBE` request.

    Format: `[UNSUBSCRIBED, UNSUBSCRIBE.Request|id]`
    """

    MESSAGE_TYPE = 35
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request):
        """
        Message constructor.

        :param request: The request ID of the original `UNSUBSCRIBE` request.
        :type request: int
        """
        assert(type(request) in six.integer_types)

        Message.__init__(self)
        self.request = request

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Unsubscribed.MESSAGE_TYPE)

        if len(wmsg) != 2:
            raise ProtocolError("invalid message length {} for UNSUBSCRIBED".format(len(wmsg)))

        return Unsubscribed(check_or_raise_id(wmsg[1], "'request' in UNSUBSCRIBED"))

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Unsubscribed.MESSAGE_TYPE, self.request]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        template = "WAMP UNSUBSCRIBED Message (request = {})"
        return template.format(self.request)
@implementer(IMessage)
class Event(Message):
    """
    A WAMP `EVENT` message, dispatched by a broker to subscribers.

    Formats:
      * `[EVENT, SUBSCRIBED.Subscription|id, PUBLISHED.Publication|id, Details|dict]`
      * `[EVENT, SUBSCRIBED.Subscription|id, PUBLISHED.Publication|id, Details|dict, PUBLISH.Arguments|list]`
      * `[EVENT, SUBSCRIBED.Subscription|id, PUBLISHED.Publication|id, Details|dict, PUBLISH.Arguments|list, PUBLISH.ArgumentsKw|dict]`
    """

    MESSAGE_TYPE = 36
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, subscription, publication, args = None, kwargs = None, publisher = None):
        """
        Message constructor.

        :param subscription: The subscription ID this event is dispatched under.
        :type subscription: int
        :param publication: The publication ID of the dispatched event.
        :type publication: int
        :param args: Positional values for application-defined exception.
           Must be serializable using any serializers in use.
        :type args: list
        :param kwargs: Keyword values for application-defined exception.
           Must be serializable using any serializers in use.
        :type kwargs: dict
        :param publisher: If present, the WAMP session ID of the publisher of this event.
        :type publisher: int
        """
        assert(type(subscription) in six.integer_types)
        assert(type(publication) in six.integer_types)
        assert(args is None or type(args) in [list, tuple])
        assert(kwargs is None or type(kwargs) == dict)
        assert(publisher is None or type(publisher) in six.integer_types)
        Message.__init__(self)
        self.subscription = subscription
        self.publication = publication
        self.args = args
        self.kwargs = kwargs
        self.publisher = publisher

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is malformed.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Event.MESSAGE_TYPE)
        ## 4 mandatory elements, plus optional Arguments and ArgumentsKw
        if len(wmsg) not in (4, 5, 6):
            raise ProtocolError("invalid message length {} for EVENT".format(len(wmsg)))
        subscription = check_or_raise_id(wmsg[1], "'subscription' in EVENT")
        publication = check_or_raise_id(wmsg[2], "'publication' in EVENT")
        details = check_or_raise_extra(wmsg[3], "'details' in EVENT")
        args = None
        if len(wmsg) > 4:
            args = wmsg[4]
            if type(args) != list:
                raise ProtocolError("invalid type {} for 'args' in EVENT".format(type(args)))
        kwargs = None
        if len(wmsg) > 5:
            kwargs = wmsg[5]
            if type(kwargs) != dict:
                raise ProtocolError("invalid type {} for 'kwargs' in EVENT".format(type(kwargs)))
        ## the publisher session ID is an optional detail (only present when
        ## disclosure was requested/allowed)
        publisher = None
        if u'publisher' in details:
            detail_publisher = details[u'publisher']
            if type(detail_publisher) not in six.integer_types:
                raise ProtocolError("invalid type {} for 'publisher' detail in EVENT".format(type(detail_publisher)))
            publisher = detail_publisher
        obj = Event(subscription,
                    publication,
                    args = args,
                    kwargs = kwargs,
                    publisher = publisher)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {}
        if self.publisher is not None:
            details[u'publisher'] = self.publisher
        ## kwargs require args on the wire
        if self.kwargs:
            return [Event.MESSAGE_TYPE, self.subscription, self.publication, details, self.args, self.kwargs]
        elif self.args:
            return [Event.MESSAGE_TYPE, self.subscription, self.publication, details, self.args]
        else:
            return [Event.MESSAGE_TYPE, self.subscription, self.publication, details]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP EVENT Message (subscription = {}, publication = {}, args = {}, kwargs = {}, publisher = {})".format(self.subscription, self.publication, self.args, self.kwargs, self.publisher)
@implementer(IMessage)
class Call(Message):
"""
A WAMP `CALL` message.
Formats:
* `[CALL, Request|id, Options|dict, Procedure|uri]`
* `[CALL, Request|id, Options|dict, Procedure|uri, Arguments|list]`
* `[CALL, Request|id, Options|dict, Procedure|uri, Arguments|list, ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 48
"""
The WAMP message code for this type of message.
"""
def __init__(self,
request,
procedure,
args = None,
kwargs = None,
timeout = None,
receive_progress = None,
discloseMe = None):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param procedure: The WAMP or application URI of the procedure which should be called.
:type procedure: str
:param args: Positional values for application-defined call arguments.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined call arguments.
Must be serializable using any serializers in use.
:param timeout: If present, let the callee automatically cancel
the call after this ms.
:type timeout: int
"""
assert(type(request) in six.integer_types)
assert(type(procedure) == six.text_type)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(timeout is None or type(timeout) in six.integer_types)
assert(receive_progress is None or type(receive_progress) == bool)
assert(discloseMe is None or type(discloseMe) == bool)
Message.__init__(self)
self.request = request
self.procedure = procedure
self.args = args
self.kwargs = kwargs
self.timeout = timeout
self.receive_progress = receive_progress
self.discloseMe = discloseMe
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Call.MESSAGE_TYPE)
if len(wmsg) not in (4, 5, 6):
raise ProtocolError("invalid message length {} for CALL".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in CALL")
options = check_or_raise_extra(wmsg[2], "'options' in CALL")
procedure = check_or_raise_uri(wmsg[3], "'procedure' in CALL")
args = None
if len(wmsg) > 4:
args = wmsg[4]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in CALL".format(type(args)))
kwargs = None
if len(wmsg) > 5:
kwargs = wmsg[5]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in CALL".format(type(kwargs)))
timeout = None
if u'timeout' in options:
option_timeout = options[u'timeout']
if type(option_timeout) not in six.integer_types:
raise ProtocolError("invalid type {} for 'timeout' option in CALL".format(type(option_timeout)))
if option_timeout < 0:
raise ProtocolError("invalid value {} for 'timeout' option in CALL".format(option_timeout))
timeout = option_timeout
receive_progress = None
if u'receive_progress' in options:
option_receive_progress = options[u'receive_progress']
if type(option_receive_progress) != bool:
raise ProtocolError("invalid type {} for 'receive_progress' option in CALL".format(type(option_receive_progress)))
receive_progress = option_receive_progress
discloseMe = None
if u'disclose_me' in options:
option_discloseMe = options[u'disclose_me']
if type(option_discloseMe) != bool:
raise ProtocolError("invalid type {} for 'disclose_me' option in CALL".format(type(option_discloseMe)))
discloseMe = option_discloseMe
obj = Call(request,
procedure,
args = args,
kwargs = kwargs,
timeout = timeout,
receive_progress = receive_progress,
discloseMe = discloseMe)
return obj
def marshal(self):
    """
    Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`

    Serialize this CALL message into its wire-level list form. Only
    options that were explicitly set are included in the options dict,
    and the trailing args/kwargs elements are appended only when present.
    """
    options = {}
    for key, value in ((u'timeout', self.timeout),
                       (u'receive_progress', self.receive_progress),
                       (u'disclose_me', self.discloseMe)):
        if value is not None:
            options[key] = value

    msg = [Call.MESSAGE_TYPE, self.request, options, self.procedure]
    if self.kwargs:
        ## kwargs can only be sent together with args on the wire
        msg.extend([self.args, self.kwargs])
    elif self.args:
        msg.append(self.args)
    return msg
def __str__(self):
    """
    Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`

    Human-readable rendering of this CALL message for logging/debugging.
    """
    fields = (self.request, self.procedure, self.args, self.kwargs,
              self.timeout, self.receive_progress, self.discloseMe)
    return ("WAMP CALL Message (request = {}, procedure = {}, args = {}, "
            "kwargs = {}, timeout = {}, receive_progress = {}, "
            "discloseMe = {})").format(*fields)
@implementer(IMessage)
class Cancel(Message):
    """
    A WAMP `CANCEL` message.

    Format: `[CANCEL, CALL.Request|id, Options|dict]`
    """

    MESSAGE_TYPE = 49
    """
    The WAMP message code for this type of message.
    """

    ## The three cancelation modes defined by the WAMP protocol.
    SKIP = u'skip'
    ABORT = u'abort'
    KILL = u'kill'

    def __init__(self, request, mode = None):
        """
        Message constructor.

        :param request: The WAMP request ID of the original `CALL` to cancel.
        :type request: int
        :param mode: Specifies how to cancel the call (`"skip"`, `"abort"` or `"kill"`).
        :type mode: str
        """
        assert(type(request) in six.integer_types)
        assert(mode is None or type(mode) == six.text_type)
        assert(mode is None or mode in [self.SKIP, self.ABORT, self.KILL])

        Message.__init__(self)
        self.request = request
        self.mode = mode

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid CANCEL message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Cancel.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for CANCEL".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in CANCEL")
        options = check_or_raise_extra(wmsg[2], "'options' in CANCEL")

        ## options
        ##
        mode = None

        if u'mode' in options:
            option_mode = options[u'mode']
            if type(option_mode) != six.text_type:
                raise ProtocolError("invalid type {} for 'mode' option in CANCEL".format(type(option_mode)))
            if option_mode not in [Cancel.SKIP, Cancel.ABORT, Cancel.KILL]:
                raise ProtocolError("invalid value '{}' for 'mode' option in CANCEL".format(option_mode))
            mode = option_mode

        obj = Cancel(request, mode = mode)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        options = {}
        if self.mode is not None:
            options[u'mode'] = self.mode
        return [Cancel.MESSAGE_TYPE, self.request, options]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        ## fixed: stray doubled quote after the mode value ("...'')" -> "...')")
        return "WAMP CANCEL Message (request = {}, mode = '{}')".format(self.request, self.mode)
@implementer(IMessage)
class Result(Message):
    """
    A WAMP `RESULT` message.

    Formats:

    * `[RESULT, CALL.Request|id, Details|dict]`
    * `[RESULT, CALL.Request|id, Details|dict, YIELD.Arguments|list]`
    * `[RESULT, CALL.Request|id, Details|dict, YIELD.Arguments|list, YIELD.ArgumentsKw|dict]`
    """

    MESSAGE_TYPE = 50
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, args = None, kwargs = None, progress = None):
        """
        Message constructor.

        :param request: The request ID of the original `CALL` request.
        :type request: int
        :param args: Positional values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type args: list
        :param kwargs: Keyword values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type kwargs: dict
        :param progress: If `True`, this result is a progressive call result, and subsequent
           results (or a final error) will follow.
        :type progress: bool
        """
        assert(type(request) in six.integer_types)
        assert(args is None or type(args) in [list, tuple])
        assert(kwargs is None or type(kwargs) == dict)
        assert(progress is None or type(progress) == bool)

        Message.__init__(self)
        self.request = request
        self.args = args
        self.kwargs = kwargs
        self.progress = progress

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid RESULT message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Result.MESSAGE_TYPE)

        if len(wmsg) not in (3, 4, 5):
            raise ProtocolError("invalid message length {} for RESULT".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in RESULT")
        details = check_or_raise_extra(wmsg[2], "'details' in RESULT")

        ## optional positional result payload (4th wire element)
        args = None
        if len(wmsg) > 3:
            args = wmsg[3]
            if type(args) != list:
                raise ProtocolError("invalid type {} for 'args' in RESULT".format(type(args)))

        ## optional keyword result payload (5th wire element)
        kwargs = None
        if len(wmsg) > 4:
            kwargs = wmsg[4]
            if type(kwargs) != dict:
                raise ProtocolError("invalid type {} for 'kwargs' in RESULT".format(type(kwargs)))

        ## 'progress' detail: must be a bool if present
        progress = None
        if u'progress' in details:
            detail_progress = details[u'progress']
            if type(detail_progress) != bool:
                raise ProtocolError("invalid type {} for 'progress' option in RESULT".format(type(detail_progress)))
            progress = detail_progress

        obj = Result(request, args = args, kwargs = kwargs, progress = progress)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        details = {}

        if self.progress is not None:
            details[u'progress'] = self.progress

        ## kwargs can only be transmitted together with args, hence the
        ## cascading message shapes
        if self.kwargs:
            return [Result.MESSAGE_TYPE, self.request, details, self.args, self.kwargs]
        elif self.args:
            return [Result.MESSAGE_TYPE, self.request, details, self.args]
        else:
            return [Result.MESSAGE_TYPE, self.request, details]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP RESULT Message (request = {}, args = {}, kwargs = {}, progress = {})".format(self.request, self.args, self.kwargs, self.progress)
@implementer(IMessage)
class Register(Message):
    """
    A WAMP `REGISTER` message.

    Format: `[REGISTER, Request|id, Options|dict, Procedure|uri]`
    """

    MESSAGE_TYPE = 64
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, procedure, pkeys = None, discloseCaller = None):
        """
        Message constructor.

        :param request: The WAMP request ID of this request.
        :type request: int
        :param procedure: The WAMP or application URI of the RPC endpoint provided.
        :type procedure: str
        :param pkeys: The endpoint can work for this list of application partition keys.
        :type pkeys: list
        :param discloseCaller: If set, value for the `disclose_caller` registration
           option (marshaled into `Options|dict`; its semantics are defined by
           the WAMP dealer).
        :type discloseCaller: bool or None
        """
        assert(type(request) in six.integer_types)
        assert(type(procedure) == six.text_type)
        assert(pkeys is None or type(pkeys) == list)
        if pkeys:
            ## every partition key must itself be an integer
            for k in pkeys:
                assert(type(k) in six.integer_types)
        assert(discloseCaller is None or type(discloseCaller) == bool)

        Message.__init__(self)
        self.request = request
        self.procedure = procedure
        self.pkeys = pkeys
        self.discloseCaller = discloseCaller

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid REGISTER message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Register.MESSAGE_TYPE)

        if len(wmsg) != 4:
            raise ProtocolError("invalid message length {} for REGISTER".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in REGISTER")
        options = check_or_raise_extra(wmsg[2], "'options' in REGISTER")
        procedure = check_or_raise_uri(wmsg[3], "'procedure' in REGISTER")

        pkeys = None
        discloseCaller = None

        ## optional 'pkeys' option: must be a list of integers if present
        if u'pkeys' in options:
            option_pkeys = options[u'pkeys']
            if type(option_pkeys) != list:
                raise ProtocolError("invalid type {} for 'pkeys' option in REGISTER".format(type(option_pkeys)))
            for pk in option_pkeys:
                if type(pk) not in six.integer_types:
                    raise ProtocolError("invalid type for value '{}' in 'pkeys' option in REGISTER".format(type(pk)))
            pkeys = option_pkeys

        ## optional 'disclose_caller' option: must be a bool if present
        if u'disclose_caller' in options:
            option_discloseCaller = options[u'disclose_caller']
            if type(option_discloseCaller) != bool:
                raise ProtocolError("invalid type {} for 'disclose_caller' option in REGISTER".format(type(option_discloseCaller)))
            discloseCaller = option_discloseCaller

        obj = Register(request, procedure, pkeys = pkeys, discloseCaller = discloseCaller)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        options = {}

        if self.pkeys is not None:
            options[u'pkeys'] = self.pkeys
        if self.discloseCaller is not None:
            options[u'disclose_caller'] = self.discloseCaller

        return [Register.MESSAGE_TYPE, self.request, options, self.procedure]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP REGISTER Message (request = {}, procedure = {}, pkeys = {}, discloseCaller = {})".format(self.request, self.procedure, self.pkeys, self.discloseCaller)
@implementer(IMessage)
class Registered(Message):
    """
    A WAMP `REGISTERED` message, sent by a dealer in response to `REGISTER`.

    Format: `[REGISTERED, REGISTER.Request|id, Registration|id]`
    """

    MESSAGE_TYPE = 65
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, registration):
        """
        Message constructor.

        :param request: The request ID of the original `REGISTER` request.
        :type request: int
        :param registration: The registration ID for the registered procedure (or procedure pattern).
        :type registration: int
        """
        assert(type(request) in six.integer_types)
        assert(type(registration) in six.integer_types)
        Message.__init__(self)
        self.request = request
        self.registration = registration

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid REGISTERED message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Registered.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for REGISTERED".format(len(wmsg)))

        ## both payload elements are WAMP IDs - validate and hand them
        ## straight to the constructor
        return Registered(check_or_raise_id(wmsg[1], "'request' in REGISTERED"),
                          check_or_raise_id(wmsg[2], "'registration' in REGISTERED"))

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Registered.MESSAGE_TYPE, self.request, self.registration]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP REGISTERED Message (request = {}, registration = {})".format(
            self.request, self.registration)
@implementer(IMessage)
class Unregister(Message):
    """
    A WAMP Unprovide message.

    Format: `[UNREGISTER, Request|id, REGISTERED.Registration|id]`
    """

    MESSAGE_TYPE = 66
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, registration):
        """
        Message constructor.

        :param request: The WAMP request ID of this request.
        :type request: int
        :param registration: The registration ID for the registration to unregister.
        :type registration: int
        """
        assert(type(request) in six.integer_types)
        assert(type(registration) in six.integer_types)
        Message.__init__(self)
        self.request = request
        self.registration = registration

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid UNREGISTER message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Unregister.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for WAMP UNREGISTER".format(len(wmsg)))

        ## both payload elements are WAMP IDs - validate and hand them
        ## straight to the constructor
        return Unregister(check_or_raise_id(wmsg[1], "'request' in UNREGISTER"),
                          check_or_raise_id(wmsg[2], "'registration' in UNREGISTER"))

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Unregister.MESSAGE_TYPE, self.request, self.registration]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP UNREGISTER Message (request = {}, registration = {})".format(
            self.request, self.registration)
@implementer(IMessage)
class Unregistered(Message):
    """
    A WAMP `UNREGISTERED` message.

    Format: `[UNREGISTERED, UNREGISTER.Request|id]`
    """

    MESSAGE_TYPE = 67
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request):
        """
        Message constructor.

        :param request: The request ID of the original `UNREGISTER` request.
        :type request: int
        """
        assert(type(request) in six.integer_types)

        Message.__init__(self)
        self.request = request

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid UNREGISTERED message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Unregistered.MESSAGE_TYPE)

        if len(wmsg) != 2:
            ## fixed: error text previously said "UNREGISTER", but this is
            ## the parser for UNREGISTERED messages
            raise ProtocolError("invalid message length {} for UNREGISTERED".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in UNREGISTERED")

        obj = Unregistered(request)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        return [Unregistered.MESSAGE_TYPE, self.request]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        ## fixed: message name previously read "UNREGISTER"
        return "WAMP UNREGISTERED Message (request = {})".format(self.request)
@implementer(IMessage)
class Invocation(Message):
    """
    A WAMP `INVOCATION` message.

    Formats:

    * `[INVOCATION, Request|id, REGISTERED.Registration|id, Details|dict]`
    * `[INVOCATION, Request|id, REGISTERED.Registration|id, Details|dict, CALL.Arguments|list]`
    * `[INVOCATION, Request|id, REGISTERED.Registration|id, Details|dict, CALL.Arguments|list, CALL.ArgumentsKw|dict]`
    """

    MESSAGE_TYPE = 68
    """
    The WAMP message code for this type of message.
    """

    def __init__(self,
                 request,
                 registration,
                 args = None,
                 kwargs = None,
                 timeout = None,
                 receive_progress = None,
                 caller = None,
                 authid = None,
                 authrole = None,
                 authmethod = None):
        """
        Message constructor.

        :param request: The WAMP request ID of this request.
        :type request: int
        :param registration: The registration ID of the endpoint to be invoked.
        :type registration: int
        :param args: Positional values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type args: list
        :param kwargs: Keyword values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type kwargs: dict
        :param timeout: If present, let the callee automatically cancels
           the invocation after this ms.
        :type timeout: int
        :param receive_progress: If `True`, the caller wants progressive results.
        :type receive_progress: bool or None
        :param caller: If present, the WAMP session ID of the caller
           (transmitted as the `caller` detail).
        :type caller: int or None
        :param authid: If present, the authentication ID of the caller.
        :type authid: unicode or None
        :param authrole: If present, the authentication role of the caller.
        :type authrole: unicode or None
        :param authmethod: If present, the authentication method the caller
           authenticated with.
        :type authmethod: unicode or None
        """
        assert(type(request) in six.integer_types)
        assert(type(registration) in six.integer_types)
        assert(args is None or type(args) in [list, tuple])
        assert(kwargs is None or type(kwargs) == dict)
        assert(timeout is None or type(timeout) in six.integer_types)
        assert(receive_progress is None or type(receive_progress) == bool)
        assert(caller is None or type(caller) in six.integer_types)
        assert(authid is None or type(authid) == six.text_type)
        assert(authrole is None or type(authrole) == six.text_type)
        assert(authmethod is None or type(authmethod) == six.text_type)

        Message.__init__(self)
        self.request = request
        self.registration = registration
        self.args = args
        self.kwargs = kwargs
        self.timeout = timeout
        self.receive_progress = receive_progress
        self.caller = caller
        self.authid = authid
        self.authrole = authrole
        self.authmethod = authmethod

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid INVOCATION message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Invocation.MESSAGE_TYPE)

        if len(wmsg) not in (4, 5, 6):
            raise ProtocolError("invalid message length {} for INVOCATION".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in INVOCATION")
        registration = check_or_raise_id(wmsg[2], "'registration' in INVOCATION")
        details = check_or_raise_extra(wmsg[3], "'details' in INVOCATION")

        ## optional positional call payload (5th wire element)
        args = None
        if len(wmsg) > 4:
            args = wmsg[4]
            if type(args) != list:
                raise ProtocolError("invalid type {} for 'args' in INVOCATION".format(type(args)))

        ## optional keyword call payload (6th wire element)
        kwargs = None
        if len(wmsg) > 5:
            kwargs = wmsg[5]
            if type(kwargs) != dict:
                raise ProtocolError("invalid type {} for 'kwargs' in INVOCATION".format(type(kwargs)))

        ## 'timeout' detail: non-negative integer if present
        timeout = None
        if u'timeout' in details:
            detail_timeout = details[u'timeout']
            if type(detail_timeout) not in six.integer_types:
                raise ProtocolError("invalid type {} for 'timeout' detail in INVOCATION".format(type(detail_timeout)))
            if detail_timeout < 0:
                raise ProtocolError("invalid value {} for 'timeout' detail in INVOCATION".format(detail_timeout))
            timeout = detail_timeout

        receive_progress = None
        if u'receive_progress' in details:
            detail_receive_progress = details[u'receive_progress']
            if type(detail_receive_progress) != bool:
                raise ProtocolError("invalid type {} for 'receive_progress' detail in INVOCATION".format(type(detail_receive_progress)))
            receive_progress = detail_receive_progress

        caller = None
        if u'caller' in details:
            detail_caller = details[u'caller']
            if type(detail_caller) not in six.integer_types:
                raise ProtocolError("invalid type {} for 'caller' detail in INVOCATION".format(type(detail_caller)))
            caller = detail_caller

        authid = None
        if u'authid' in details:
            detail_authid = details[u'authid']
            if type(detail_authid) != six.text_type:
                raise ProtocolError("invalid type {} for 'authid' detail in INVOCATION".format(type(detail_authid)))
            authid = detail_authid

        authrole = None
        if u'authrole' in details:
            detail_authrole = details[u'authrole']
            if type(detail_authrole) != six.text_type:
                raise ProtocolError("invalid type {} for 'authrole' detail in INVOCATION".format(type(detail_authrole)))
            authrole = detail_authrole

        authmethod = None
        if u'authmethod' in details:
            detail_authmethod = details[u'authmethod']
            ## fixed: this branch previously type-checked (and reported)
            ## detail_authrole instead of detail_authmethod, so an invalid
            ## 'authmethod' detail could slip through unvalidated
            if type(detail_authmethod) != six.text_type:
                raise ProtocolError("invalid type {} for 'authmethod' detail in INVOCATION".format(type(detail_authmethod)))
            authmethod = detail_authmethod

        obj = Invocation(request,
                         registration,
                         args = args,
                         kwargs = kwargs,
                         timeout = timeout,
                         receive_progress = receive_progress,
                         caller = caller,
                         authid = authid,
                         authrole = authrole,
                         authmethod = authmethod)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        options = {}

        if self.timeout is not None:
            options[u'timeout'] = self.timeout
        if self.receive_progress is not None:
            options[u'receive_progress'] = self.receive_progress
        if self.caller is not None:
            options[u'caller'] = self.caller
        if self.authid is not None:
            options[u'authid'] = self.authid
        if self.authrole is not None:
            options[u'authrole'] = self.authrole
        if self.authmethod is not None:
            options[u'authmethod'] = self.authmethod

        ## kwargs can only be transmitted together with args, hence the
        ## cascading message shapes
        if self.kwargs:
            return [Invocation.MESSAGE_TYPE, self.request, self.registration, options, self.args, self.kwargs]
        elif self.args:
            return [Invocation.MESSAGE_TYPE, self.request, self.registration, options, self.args]
        else:
            return [Invocation.MESSAGE_TYPE, self.request, self.registration, options]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP INVOCATION Message (request = {}, registration = {}, args = {}, kwargs = {}, timeout = {}, receive_progress = {}, caller = {}, authid = {}, authrole = {}, authmethod = {})".format(self.request, self.registration, self.args, self.kwargs, self.timeout, self.receive_progress, self.caller, self.authid, self.authrole, self.authmethod)
@implementer(IMessage)
class Interrupt(Message):
    """
    A WAMP `INTERRUPT` message.

    Format: `[INTERRUPT, INVOCATION.Request|id, Options|dict]`
    """

    MESSAGE_TYPE = 69
    """
    The WAMP message code for this type of message.
    """

    ## The two interruption modes defined by the WAMP protocol.
    ABORT = u'abort'
    KILL = u'kill'

    def __init__(self, request, mode = None):
        """
        Message constructor.

        :param request: The WAMP request ID of the original `INVOCATION` to interrupt.
        :type request: int
        :param mode: Specifies how to interrupt the invocation (`"abort"` or `"kill"`).
        :type mode: str
        """
        assert(type(request) in six.integer_types)
        assert(mode is None or type(mode) == six.text_type)
        assert(mode is None or mode in [self.ABORT, self.KILL])

        Message.__init__(self)
        self.request = request
        self.mode = mode

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid INTERRUPT message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Interrupt.MESSAGE_TYPE)

        if len(wmsg) != 3:
            raise ProtocolError("invalid message length {} for INTERRUPT".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in INTERRUPT")
        options = check_or_raise_extra(wmsg[2], "'options' in INTERRUPT")

        ## options
        ##
        mode = None

        if u'mode' in options:
            option_mode = options[u'mode']
            if type(option_mode) != six.text_type:
                raise ProtocolError("invalid type {} for 'mode' option in INTERRUPT".format(type(option_mode)))
            if option_mode not in [Interrupt.ABORT, Interrupt.KILL]:
                raise ProtocolError("invalid value '{}' for 'mode' option in INTERRUPT".format(option_mode))
            mode = option_mode

        obj = Interrupt(request, mode = mode)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        options = {}
        if self.mode is not None:
            options[u'mode'] = self.mode
        return [Interrupt.MESSAGE_TYPE, self.request, options]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        ## fixed: stray doubled quote after the mode value ("...'')" -> "...')")
        return "WAMP INTERRUPT Message (request = {}, mode = '{}')".format(self.request, self.mode)
@implementer(IMessage)
class Yield(Message):
    """
    A WAMP `YIELD` message.

    Formats:

    * `[YIELD, INVOCATION.Request|id, Options|dict]`
    * `[YIELD, INVOCATION.Request|id, Options|dict, Arguments|list]`
    * `[YIELD, INVOCATION.Request|id, Options|dict, Arguments|list, ArgumentsKw|dict]`
    """

    MESSAGE_TYPE = 70
    """
    The WAMP message code for this type of message.
    """

    def __init__(self, request, args = None, kwargs = None, progress = None):
        """
        Message constructor.

        :param request: The WAMP request ID of the original call.
        :type request: int
        :param args: Positional values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type args: list
        :param kwargs: Keyword values for application-defined event payload.
           Must be serializable using any serializers in use.
        :type kwargs: dict
        :param progress: If `True`, this result is a progressive invocation result, and subsequent
           results (or a final error) will follow.
        :type progress: bool
        """
        assert(type(request) in six.integer_types)
        assert(args is None or type(args) in [list, tuple])
        assert(kwargs is None or type(kwargs) == dict)
        assert(progress is None or type(progress) == bool)

        Message.__init__(self)
        self.request = request
        self.args = args
        self.kwargs = kwargs
        self.progress = progress

    @staticmethod
    def parse(wmsg):
        """
        Verifies and parses an unserialized raw message into an actual WAMP message instance.

        :param wmsg: The unserialized raw message.
        :type wmsg: list
        :returns obj -- An instance of this class.
        :raises ProtocolError: if the raw message is not a valid YIELD message.
        """
        ## this should already be verified by WampSerializer.unserialize
        ##
        assert(len(wmsg) > 0 and wmsg[0] == Yield.MESSAGE_TYPE)

        if len(wmsg) not in (3, 4, 5):
            raise ProtocolError("invalid message length {} for YIELD".format(len(wmsg)))

        request = check_or_raise_id(wmsg[1], "'request' in YIELD")
        options = check_or_raise_extra(wmsg[2], "'options' in YIELD")

        ## optional positional result payload (4th wire element)
        args = None
        if len(wmsg) > 3:
            args = wmsg[3]
            if type(args) != list:
                raise ProtocolError("invalid type {} for 'args' in YIELD".format(type(args)))

        ## optional keyword result payload (5th wire element)
        kwargs = None
        if len(wmsg) > 4:
            kwargs = wmsg[4]
            if type(kwargs) != dict:
                raise ProtocolError("invalid type {} for 'kwargs' in YIELD".format(type(kwargs)))

        ## 'progress' option: must be a bool if present
        progress = None
        if u'progress' in options:
            option_progress = options[u'progress']
            if type(option_progress) != bool:
                raise ProtocolError("invalid type {} for 'progress' option in YIELD".format(type(option_progress)))
            progress = option_progress

        obj = Yield(request, args = args, kwargs = kwargs, progress = progress)
        return obj

    def marshal(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
        """
        options = {}

        if self.progress is not None:
            options[u'progress'] = self.progress

        ## kwargs can only be transmitted together with args, hence the
        ## cascading message shapes
        if self.kwargs:
            return [Yield.MESSAGE_TYPE, self.request, options, self.args, self.kwargs]
        elif self.args:
            return [Yield.MESSAGE_TYPE, self.request, options, self.args]
        else:
            return [Yield.MESSAGE_TYPE, self.request, options]

    def __str__(self):
        """
        Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
        """
        return "WAMP YIELD Message (request = {}, args = {}, kwargs = {}, progress = {})".format(self.request, self.args, self.kwargs, self.progress)
| [
"tobias.oberstein@tavendo.de"
] | tobias.oberstein@tavendo.de |
f5c6201a2927472f717a1178da02bcccb2d9b5b5 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/services/services/ad_parameter_service/transports/grpc.py | 584d8c2434a25ed27716cbfbc4c3bdafe8094dd1 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,900 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v8.resources.types import ad_parameter
from google.ads.googleads.v8.services.types import ad_parameter_service
from .base import AdParameterServiceTransport, DEFAULT_CLIENT_INFO
class AdParameterServiceGrpcTransport(AdParameterServiceTransport):
"""gRPC backend transport for AdParameterService.
Service to manage ad parameters.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(self, *,
        host: str = 'googleads.googleapis.com',
        credentials: ga_credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Sequence[str] = None,
        channel: grpc.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        ) -> None:
    """Instantiate the transport.

    Args:
        host (Optional[str]):
             The hostname to connect to.
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify the application to the service; if none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
            This argument is ignored if ``channel`` is provided.
        credentials_file (Optional[str]): A file with credentials that can
            be loaded with :func:`google.auth.load_credentials_from_file`.
            This argument is ignored if ``channel`` is provided.
        scopes (Optional(Sequence[str])): A list of scopes. This argument is
            ignored if ``channel`` is provided.
        channel (Optional[grpc.Channel]): A ``Channel`` instance through
            which to make calls.
        api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
            If provided, it overrides the ``host`` argument and tries to create
            a mutual TLS channel with client SSL credentials from
            ``client_cert_source`` or application default SSL credentials.
        client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
            Deprecated. A callback to provide client SSL certificate bytes and
            private key bytes, both in PEM format. It is ignored if
            ``api_mtls_endpoint`` is None.
        ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
            for grpc channel. It is ignored if ``channel`` is provided.
        quota_project_id (Optional[str]): An optional project to use for billing
            and quota.
        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            The client info used to send a user-agent string along with
            API requests. If ``None``, then default info will be used.
            Generally, you only need to set this if you're developing
            your own client library.

    Raises:
      google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
          creation failed for any reason.
    """
    self._ssl_channel_credentials = ssl_channel_credentials

    if channel:
        # Sanity check: Ensure that channel and credentials are not both
        # provided.
        # NOTE(review): `False` (not None) is forwarded to the base
        # constructor below — presumably to stop it from resolving
        # default credentials when a pre-built channel is supplied;
        # confirm against AdParameterServiceTransport.__init__.
        credentials = False

        # If a channel was explicitly provided, set it.
        self._grpc_channel = channel
        self._ssl_channel_credentials = None
    elif api_mtls_endpoint:
        # Deprecated mTLS path: the endpoint overrides ``host``.
        warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning)

        # Normalize to host:port, defaulting to the gRPC TLS port 443.
        host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443"

        if credentials is None:
            credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id)

        # Create SSL credentials with client_cert_source or application
        # default SSL credentials.
        if client_cert_source:
            cert, key = client_cert_source()
            ssl_credentials = grpc.ssl_channel_credentials(
                certificate_chain=cert, private_key=key
            )
        else:
            ssl_credentials = SslCredentials().ssl_credentials

        # create a new channel. The provided one is ignored.
        # Message size limits are disabled (-1 = unlimited) for both
        # directions.
        self._grpc_channel = type(self).create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            ssl_credentials=ssl_credentials,
            scopes=scopes or self.AUTH_SCOPES,
            quota_project_id=quota_project_id,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
        self._ssl_channel_credentials = ssl_credentials
    else:
        # Regular (non-deprecated) path: normalize host and build a
        # channel with whatever SSL credentials the caller passed.
        host = host if ":" in host else host + ":443"

        if credentials is None:
            credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)

        # create a new channel. The provided one is ignored.
        self._grpc_channel = type(self).create_channel(
            host,
            credentials=credentials,
            ssl_credentials=ssl_channel_credentials,
            scopes=self.AUTH_SCOPES,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Per-RPC stubs are created lazily (see the service properties below).
    self._stubs = {}  # type: Dict[str, Callable]

    # Run the base constructor.
    super().__init__(
        host=host,
        credentials=credentials,
        client_info=client_info,
    )
@classmethod
def create_channel(cls,
        host: str = 'googleads.googleapis.com',
        credentials: ga_credentials.Credentials = None,
        scopes: Optional[Sequence[str]] = None,
        **kwargs) -> grpc.Channel:
    """Create and return a gRPC channel object.

    Args:
        host (Optional[str]): The host for the channel to use.
        credentials (Optional[~.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify this application to the service. If
            none are specified, the client will attempt to ascertain
            the credentials from the environment.
        scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
            service. These are only used when credentials are not specified and
            are passed to :func:`google.auth.default`.
        kwargs (Optional[dict]): Keyword arguments, which are passed to the
            channel creation.

    Returns:
        grpc.Channel: A gRPC channel object.
    """
    # Fall back to the service's default OAuth scopes when the caller
    # supplied none (an empty sequence also triggers the fallback).
    effective_scopes = scopes or cls.AUTH_SCOPES
    return grpc_helpers.create_channel(
        host,
        credentials=credentials,
        scopes=effective_scopes,
        **kwargs
    )
    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.

        The channel is created once during ``__init__`` and reused for
        every RPC issued through this transport.
        """
        return self._grpc_channel
@property
def get_ad_parameter(self) -> Callable[
[ad_parameter_service.GetAdParameterRequest],
ad_parameter.AdParameter]:
r"""Return a callable for the get ad parameter method over gRPC.
Returns the requested ad parameter in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.GetAdParameterRequest],
~.AdParameter]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_ad_parameter' not in self._stubs:
self._stubs['get_ad_parameter'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v8.services.AdParameterService/GetAdParameter',
request_serializer=ad_parameter_service.GetAdParameterRequest.serialize,
response_deserializer=ad_parameter.AdParameter.deserialize,
)
return self._stubs['get_ad_parameter']
@property
def mutate_ad_parameters(self) -> Callable[
[ad_parameter_service.MutateAdParametersRequest],
ad_parameter_service.MutateAdParametersResponse]:
r"""Return a callable for the mutate ad parameters method over gRPC.
Creates, updates, or removes ad parameters. Operation statuses
are returned.
List of thrown errors: `AdParameterError <>`__
`AuthenticationError <>`__ `AuthorizationError <>`__
`ContextError <>`__ `DatabaseError <>`__ `FieldError <>`__
`FieldMaskError <>`__ `HeaderError <>`__ `InternalError <>`__
`MutateError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.MutateAdParametersRequest],
~.MutateAdParametersResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'mutate_ad_parameters' not in self._stubs:
self._stubs['mutate_ad_parameters'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v8.services.AdParameterService/MutateAdParameters',
request_serializer=ad_parameter_service.MutateAdParametersRequest.serialize,
response_deserializer=ad_parameter_service.MutateAdParametersResponse.deserialize,
)
return self._stubs['mutate_ad_parameters']
# Names exported when this module is imported with ``from ... import *``.
__all__ = (
    'AdParameterServiceGrpcTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
56c198953033a0f78f55db2c3afa95d0de6787e9 | 0c084555ac64297faacf2a62df80b7caf3fc8822 | /mygene.info/src/hub/dataload/sources/entrez/unigene_upload.py | cc9caeafb5aaa0749dddab7917463902f7283c54 | [
"Apache-2.0"
] | permissive | NCATS-Tangerine/MyGeneBeacon | ae59b3a49142fe8da57d421a3494000c61cbc35e | 5c70d986d522ee9d9b709784afe992e315a5b76e | refs/heads/master | 2021-05-13T18:05:44.020677 | 2018-01-09T18:58:06 | 2018-01-09T18:58:06 | 116,851,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | from .parser import Gene2UnigeneParser
import biothings.hub.dataload.uploader as uploader
class EntrezUnigeneUploader(uploader.MergerSourceUploader):
    """Upload gene-to-UniGene mappings, merged into the "entrez" source."""

    name = "entrez_unigene"
    main_source = "entrez"

    def load_data(self, data_folder):
        # Parse gene2unigene for all species and return the merged records.
        self.parser = Gene2UnigeneParser(data_folder)
        self.parser.set_all_species()
        return self.parser.load()

    @classmethod
    def get_mapping(cls):
        # Elasticsearch mapping: index the "unigene" field lowercased.
        return {
            "unigene": {
                "type": "string",
                "analyzer": "string_lowercase"
            }
        }
| [
"lhannest@sfu.ca"
] | lhannest@sfu.ca |
cd01e7265b05f24764a2257ba093d8968d37c3f9 | 5d08402e56855d55d0f14ff7642f27e01cadcced | /listing/migrations/0011_auto_20210112_0908.py | dcc94ef6a06111b4ef4ed8b9ac93d7f9c3d52654 | [] | no_license | crowdbotics-apps/test-23821 | 783d757965931f35819343bf57627265dfa5f6e5 | 5cf465eabbd921055d31ac38b54693aa581713a5 | refs/heads/master | 2023-02-12T15:23:18.370076 | 2021-01-14T00:46:16 | 2021-01-14T00:46:16 | 329,440,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,243 | py | # Generated by Django 2.2.17 on 2021-01-12 09:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace ``Listing.listing_type`` with a FK to a new ``ListingPlan`` model."""

    dependencies = [
        ('listing', '0010_merge_20210112_0906'),
    ]
    operations = [
        # New pricing-plan table referenced by listings.
        migrations.CreateModel(
            name='ListingPlan',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('slug', models.SlugField(max_length=30, unique=True)),
                ('seller_price', models.FloatField(blank=True, null=True)),
                ('buyer_percentage', models.FloatField(blank=True, null=True)),
                ('min', models.FloatField(blank=True, null=True)),
                ('max', models.FloatField(blank=True, null=True)),
            ],
        ),
        # Drop the old free-form field in favor of the FK below.
        migrations.RemoveField(
            model_name='listing',
            name='listing_type',
        ),
        # NOTE(review): CASCADE with null=True means deleting a plan deletes
        # its listings — confirm that is intended.
        migrations.AddField(
            model_name='listing',
            name='plan',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='listing.ListingPlan'),
        ),
    ]
| [
"dawarsardar786@gmail.com"
] | dawarsardar786@gmail.com |
9119d3d91838e00e3ab736b6c6f7daa0efe4acd9 | 70cf354a5eb38ddaf6e594b162706f3b38925647 | /02-03/todolist/todolist/settings.py | ebbe5f45ca6c4125016ff65607df10be584ea861 | [] | no_license | hanifmisbah/PythonDjango-Practice-Coding | 0051c73c67ddfa9d49e47b1fd77b798b253275bb | e15de23fd1d7f4b118f7149d6d6ff3b25f898b82 | refs/heads/master | 2022-12-25T00:13:09.855630 | 2020-10-05T10:50:13 | 2020-10-05T10:50:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,120 | py | """
Django settings for todolist project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): secret key is committed to source control — load it from an
# environment variable before deploying.
SECRET_KEY = '70w@1hcxqg0*em	a9&jx330n*$+=cmckhwwv!f0!yl9tql=_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list is fine while DEBUG=True; must list real hosts in production.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project app holding the to-do task models/views.
    'task',
    # 'delete'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'todolist.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # No project-level template dirs; app templates are found via APP_DIRS.
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'todolist.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Development default: file-based SQLite in the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| [
"hanifmisbah97@gmail.com"
] | hanifmisbah97@gmail.com |
0ae12e49362282d60fe111f7b96ed887d37a6046 | 4c2da39f554f6fb5a92c6ceb886fa5305e085b8c | /mylar/torrent/clients/qbittorrent.py | 6cb1c28065af55d5e0b3ebc6952df6db6b4e290b | [] | no_license | ferretcomp/mylar | c48520f69d43e92145bdcc9c17fd3ef735bf9b0b | 2b2687361b3decadc65b10f1b8319dad165698ea | refs/heads/master | 2021-04-15T08:15:09.923411 | 2018-01-17T20:55:24 | 2018-01-17T20:55:24 | 94,567,775 | 0 | 0 | null | 2017-06-16T17:41:55 | 2017-06-16T17:41:55 | null | UTF-8 | Python | false | false | 5,014 | py | import os
import mylar
import base64
import time
from mylar import logger, helpers
from lib.qbittorrent import client
class TorrentClient(object):
    """Thin wrapper around the qBittorrent WebUI client used by mylar.

    Relies on module-level configuration (``mylar.QBITTORRENT_LABEL``,
    ``mylar.QBITTORRENT_STARTONLOAD``) and ``lib.qbittorrent.client``.

    NOTE(review): return types are intentionally inconsistent to preserve
    backward compatibility with existing callers — failures yield
    ``{'status': False}`` while success paths return richer objects.
    """

    def __init__(self):
        # Cached authenticated client; populated by connect() on success.
        self.conn = None

    def connect(self, host, username, password):
        """Authenticate against the qBittorrent WebUI at ``host``.

        Returns:
            The authenticated client on success (cached for reuse),
            ``{'status': False}`` when ``host`` is missing or the client
            object cannot be created, or ``None`` when login fails
            (legacy behavior, preserved).
        """
        if self.conn is not None:
            # BUGFIX: previously returned the bound method ``self.connect``
            # instead of the cached connection, so callers got a method
            # object; also ``self.conn`` was never set, making the cache dead.
            return self.conn
        if not host:
            return {'status': False}
        try:
            logger.info(host)
            self.client = client.Client(host)
        except Exception as e:
            logger.error('Could not create qBittorrent Object' + str(e))
            return {'status': False}
        else:
            try:
                self.client.login(username, password)
            except Exception as e:
                logger.error('Could not connect to qBittorrent ' + host)
            else:
                # Cache the working client so subsequent connects are no-ops.
                self.conn = self.client
                return self.client

    def find_torrent(self, hash):
        """Return True when a torrent with info-hash ``hash`` exists on the client."""
        logger.debug('Finding Torrent hash: ' + hash)
        torrent_info = self.get_torrent(hash)
        if torrent_info:
            return True
        else:
            return False

    def get_torrent(self, hash):
        """Fetch the client's info dict for ``hash``; return False when unavailable."""
        logger.debug('Getting Torrent info hash: ' + hash)
        try:
            torrent_info = self.client.get_torrent(hash)
        except Exception as e:
            logger.error('Could not get torrent info for ' + hash)
            return False
        else:
            logger.info('Successfully located information for torrent')
            return torrent_info

    def load_torrent(self, filepath):
        """Add the .torrent file at ``filepath`` to the connected client.

        Returns:
            dict: torrent details (hash, files, name, sizes, folder, label,
            ``status: True``) on success, or ``{'status': False}`` when the
            torrent already exists, the add fails, or it cannot be located
            afterwards. Returns ``None`` when the client is not
            authenticated (legacy behavior, preserved).
        """
        logger.info('filepath to torrent file set to : ' + filepath)
        if self.client._is_authenticated is True:
            logger.info('Checking if Torrent Exists!')
            torrent_hash = self.get_the_hash(filepath)
            logger.debug('Torrent Hash (load_torrent): "' + torrent_hash + '"')
            logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
            # Check if torrent already added
            if self.find_torrent(torrent_hash):
                logger.info('load_torrent: Torrent already exists!')
                # Flagging it here keeps the failed-download checker from
                # re-grabbing issues that are already downloaded.
                return {'status': False}
            else:
                logger.info('Torrent not added yet, trying to add it now!')
                try:
                    # BUGFIX: the file handle was previously leaked; the
                    # context manager guarantees it is closed.
                    with open(filepath, 'rb') as torrent_content:
                        tid = self.client.download_from_file(
                            torrent_content, category=str(mylar.QBITTORRENT_LABEL))
                except Exception as e:
                    logger.debug('Torrent not added')
                    return {'status': False}
                else:
                    logger.debug('Successfully submitted for add. Verifying item is now on client.')
                if mylar.QBITTORRENT_STARTONLOAD:
                    logger.info('attempting to start')
                    startit = self.client.force_start(torrent_hash)
                    logger.info('startit returned:' + str(startit))
                else:
                    logger.info('attempting to pause torrent incase it starts')
                    try:
                        startit = self.client.pause(torrent_hash)
                        logger.info('startit paused:' + str(startit))
                    except:
                        logger.warn('Unable to pause torrent - possibly already paused?')
                try:
                    time.sleep(5)  # wait 5 in case it's not populated yet.
                    tinfo = self.get_torrent(torrent_hash)
                except Exception as e:
                    logger.warn('Torrent was not added! Please check logs')
                    return {'status': False}
                else:
                    logger.info('Torrent successfully added!')
                    filelist = self.client.get_torrent_files(torrent_hash)
                    # Single-file torrents report the file name; multi-file
                    # torrents report the save directory.
                    if len(filelist) == 1:
                        to_name = filelist[0]['name']
                    else:
                        to_name = tinfo['save_path']
                    torrent_info = {'hash': torrent_hash,
                                    'files': filelist,
                                    'name': to_name,
                                    'total_filesize': tinfo['total_size'],
                                    'folder': tinfo['save_path'],
                                    'time_started': tinfo['addition_date'],
                                    'label': mylar.QBITTORRENT_LABEL,
                                    'status': True}
                    return torrent_info

    def get_the_hash(self, filepath):
        """Return the uppercase SHA-1 info-hash of the .torrent file at ``filepath``."""
        import hashlib
        import bencode
        # Only the bencoded 'info' dict defines a torrent's identity.
        # BUGFIX: use a context manager (handle was leaked) and drop the
        # unused StringIO import.
        with open(filepath, "rb") as torrent_file:
            metainfo = bencode.decode(torrent_file.read())
        info = metainfo['info']
        thehash = hashlib.sha1(bencode.encode(info)).hexdigest().upper()
        logger.debug('Hash: ' + thehash)
        return thehash
| [
"evilhero@gmail.com"
] | evilhero@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.