Dataset schema:

| column | type | values / lengths |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64, nullable | 1 – 191k |
| max_stars_repo_stars_event_min_datetime | string, nullable | length 24 |
| max_stars_repo_stars_event_max_datetime | string, nullable | length 24 |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64, nullable | 1 – 67k |
| max_issues_repo_issues_event_min_datetime | string, nullable | length 24 |
| max_issues_repo_issues_event_max_datetime | string, nullable | length 24 |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64, nullable | 1 – 105k |
| max_forks_repo_forks_event_min_datetime | string, nullable | length 24 |
| max_forks_repo_forks_event_max_datetime | string, nullable | length 24 |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
hexsha: c1080c3a09ec7cf57db7916fe5248f126bb96073 | size: 244 | ext: py | lang: Python
max_stars: exercise/app/lib/check_signature.py @ assuzzanne/my-sqreen (head 81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b) | licenses: ["MIT"] | count: null | events: null
max_issues: exercise/app/lib/check_signature.py @ assuzzanne/my-sqreen (head 81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b) | licenses: ["MIT"] | count: 1 | events: 2021-06-02T00:27:34.000Z – 2021-06-02T00:27:34.000Z
max_forks: exercise/app/lib/check_signature.py @ assuzzanne/notifications-dispatcher-api (head 81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b) | licenses: ["MIT"] | count: null | events: null
content:
import hmac
import hashlib


def check_signature(secret_key, request_signature, request_body):
    hasher = hmac.new(secret_key, request_body, hashlib.sha256)
    dig = hasher.hexdigest()
    return hmac.compare_digest(dig, request_signature)
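A minimal usage sketch of the `check_signature` helper above; the secret and payload values are invented for illustration and are not part of the dataset row:

```python
import hmac
import hashlib

secret = b"my-webhook-secret"    # hypothetical shared secret
body = b'{"event": "ping"}'      # hypothetical request body
# The sender computes the same HMAC-SHA256 digest and attaches it to the request.
signature = hmac.new(secret, body, hashlib.sha256).hexdigest()
print(check_signature(secret, signature, body))   # -> True
print(check_signature(secret, "0" * 64, body))    # -> False
```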
avg_line_length: 24.4 | max_line_length: 65 | alphanum_fraction: 0.778689
hexsha: b0332c4fd8c30a15c6b236669f5b0119a0a2f7d2 | size: 632 | ext: py | lang: Python
max_stars: labelspark/api_key.py @ cristobalmitchell/labelspark (head 39e1df53de7c1da793944c53112d11711778332d) | licenses: ["Apache-2.0"] | count: 6 | events: 2021-05-27T00:26:06.000Z – 2022-02-03T00:29:00.000Z
max_issues: labelspark/api_key.py @ cristobalmitchell/labelspark (head 39e1df53de7c1da793944c53112d11711778332d) | licenses: ["Apache-2.0"] | count: 10 | events: 2021-07-01T10:24:17.000Z – 2022-02-25T20:29:10.000Z
max_forks: labelspark/api_key.py @ cristobalmitchell/labelspark (head 39e1df53de7c1da793944c53112d11711778332d) | licenses: ["Apache-2.0"] | count: 3 | events: 2021-07-01T10:15:53.000Z – 2021-12-29T20:24:51.000Z
content:
# Databricks notebook source
# We recommend using the Databricks Secrets API to create a variable for your Labelbox API key, but if you do
# not have access to the Secrets API you can use this notebook template to store your API key in a separate
# notebook. Be sure to include it in .gitignore to avoid committing your API key to Git.
api_key = "insert api key"
dbutils.notebook.exit(
    api_key)  # returns api_key if you call this notebook via a notebook workflow
# Example code for a notebook workflow with dbutils; it will get api_key from the notebook "api_key":
# try: API_KEY
# except NameError:
#     API_KEY = dbutils.notebook.run("api_key", 60)
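For completeness, a hedged sketch of the Secrets-API route the comment above recommends; the scope and key names are placeholders, not values from this repository:

```python
# Hypothetical alternative using Databricks secrets (scope/key names are placeholders):
# api_key = dbutils.secrets.get(scope="labelbox", key="api_key")
```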
avg_line_length: 48.615385 | max_line_length: 293 | alphanum_fraction: 0.773734
hexsha: b99a2d8a7c40282c58d0dfcf4c77bdf4e5e8ef0f | size: 273 | ext: py | lang: Python
max_stars: async_dns/core/logger.py @ RomaLash/async_dns (head badfa5302fe18a230bc9d6754f78a61b375affaa) | licenses: ["MIT"] | count: 24 | events: 2020-10-16T16:36:40.000Z – 2020-10-16T16:37:01.000Z
max_issues: async_dns/core/logger.py @ RomaLash/async_dns (head badfa5302fe18a230bc9d6754f78a61b375affaa) | licenses: ["MIT"] | count: null | events: null
max_forks: async_dns/core/logger.py @ RomaLash/async_dns (head badfa5302fe18a230bc9d6754f78a61b375affaa) | licenses: ["MIT"] | count: null | events: null
content:
import os
import logging
logger = logging.getLogger(__package__)
logger.setLevel(os.environ.get('LOGLEVEL') or 'INFO')
handler = logging.StreamHandler()
fmt = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
handler.setFormatter(fmt)
logger.addHandler(handler)
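A small usage sketch: importing the module above configures the package-level logger once, after which callers log through the standard `logging` API. The `LOGLEVEL` value shown is only an example:

```python
# e.g. run with: LOGLEVEL=DEBUG python your_script.py   (the variable is read once, at import time)
logger.info("resolver started")   # printed, since the default level is INFO
logger.debug("cache miss")        # only printed when LOGLEVEL=DEBUG (or lower) was set
```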
avg_line_length: 27.3 | max_line_length: 65 | alphanum_fraction: 0.772894
hexsha: fddc58d49e1790e88d6b50e4f8169a003517fd3e | size: 2,270 | ext: py | lang: Python
max_stars: tensorflow_datasets/core/features/audio_feature.py @ atksh/datasets (head 814058b31ebd99e418114016d60ab4d6f8f82070) | licenses: ["Apache-2.0"] | count: null | events: null
max_issues: tensorflow_datasets/core/features/audio_feature.py @ atksh/datasets (head 814058b31ebd99e418114016d60ab4d6f8f82070) | licenses: ["Apache-2.0"] | count: null | events: null
max_forks: tensorflow_datasets/core/features/audio_feature.py @ atksh/datasets (head 814058b31ebd99e418114016d60ab4d6f8f82070) | licenses: ["Apache-2.0"] | count: null | events: null
content:
# coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Audio feature."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf

from tensorflow_datasets.core import api_utils
from tensorflow_datasets.core import lazy_imports_lib
from tensorflow_datasets.core.features import feature


class Audio(feature.Tensor):
  """`FeatureConnector` for audio, encoded as raw integer wave form."""

  @api_utils.disallow_positional_args
  def __init__(self, file_format=None, shape=(None,)):
    """Constructs the connector.

    Args:
      file_format: `str`, the audio file format. Can be any format ffmpeg
        understands. If `None`, will attempt to infer from the file extension.
      shape: `tuple`, shape of the data.
    """
    self._file_format = file_format
    if len(shape) != 1:
      raise TypeError(
          "Audio feature currently only supports 1-D values, got %s." % shape
      )
    self._shape = shape
    super(Audio, self).__init__(shape=shape, dtype=tf.int64)

  def encode_example(self, audio_or_path_or_fobj):
    audio = audio_or_path_or_fobj
    if isinstance(audio, (np.ndarray, list)):
      return audio
    with tf.io.gfile.GFile(audio, "rb") as audio_f:
      file_format = self._file_format or audio.split(".")[-1]
      audio_segment = lazy_imports_lib.lazy_imports.pydub.AudioSegment.from_file(
          audio_f, format=file_format
      )
    return super(Audio, self).encode_example(
        np.array(audio_segment.get_array_of_samples()).astype(np.int64)
    )
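A hedged sketch of how the connector above might be used; the waveform is synthetic, and the file-path branch is only indicated in a comment since it needs pydub/ffmpeg available:

```python
# Encoding an in-memory waveform: ndarray/list inputs are returned as-is.
audio_feature = Audio(shape=(None,))
wave = np.arange(16000, dtype=np.int64)        # one second of fake 16 kHz samples
encoded = audio_feature.encode_example(wave)
# audio_feature.encode_example("sample.wav")   # hypothetical path input, decoded via pydub/ffmpeg
```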
avg_line_length: 36.031746 | max_line_length: 87 | alphanum_fraction: 0.693833
hexsha: 48e775a8637e6109dde725e9a1ce4e9206a7130c | size: 1,306 | ext: py | lang: Python
max_stars: app/core/tests/test_admin.py @ Brandogs/recipe-app-api (head c2878acdc06b3d70f8ba6d2c285bb10f4caf7c79) | licenses: ["MIT"] | count: null | events: null
max_issues: app/core/tests/test_admin.py @ Brandogs/recipe-app-api (head c2878acdc06b3d70f8ba6d2c285bb10f4caf7c79) | licenses: ["MIT"] | count: null | events: null
max_forks: app/core/tests/test_admin.py @ Brandogs/recipe-app-api (head c2878acdc06b3d70f8ba6d2c285bb10f4caf7c79) | licenses: ["MIT"] | count: null | events: null
content:
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse


class AdminSiteTests(TestCase):

    def setUp(self):
        self.client = Client()
        self.admin_user = get_user_model().objects.create_superuser(
            email='admin@test.com',
            password='password123'
        )
        self.client.force_login(self.admin_user)
        self.user = get_user_model().objects.create_user(
            email='user@test.com',
            password='password123',
            name='Test User Full Name'
        )

    def test_users_listed(self):
        """Test that users are listed on user page"""
        url = reverse('admin:core_user_changelist')
        res = self.client.get(url)
        self.assertContains(res, self.user.name)
        self.assertContains(res, self.user.email)

    def test_user_change_page(self):
        """Test that user edit works"""
        url = reverse('admin:core_user_change', args=[self.user.id])
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)

    def test_create_user_page(self):
        """Test that the create user page works"""
        url = reverse('admin:core_user_add')
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
avg_line_length: 31.095238 | max_line_length: 68 | alphanum_fraction: 0.637825
hexsha: 94d193ed42d38514b5bbeb731697681ea6ee2db0 | size: 13,011 | ext: py | lang: Python
max_stars: atom/proton/python/proton_api/models/financial_health_check_request.py @ sumit4-ttn/SDK (head b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff) | licenses: ["Apache-2.0"] | count: null | events: null
max_issues: atom/proton/python/proton_api/models/financial_health_check_request.py @ sumit4-ttn/SDK (head b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff) | licenses: ["Apache-2.0"] | count: null | events: null
max_forks: atom/proton/python/proton_api/models/financial_health_check_request.py @ sumit4-ttn/SDK (head b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff) | licenses: ["Apache-2.0"] | count: null | events: null
content:
# coding: utf-8
"""
Hydrogen Proton API
Financial engineering module of Hydrogen Atom # noqa: E501
OpenAPI spec version: 1.7.18
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class FinancialHealthCheckRequest(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'lookback_periods': 'int',
'gross_annual_income': 'float',
'ratio_targets': 'object',
'liquid_assets': 'float',
'client_id': 'str',
'net_monthly_income': 'float',
'total_liabilities': 'float',
'non_liquid_assets': 'float',
'short_term_liabilities': 'float',
'monthly_expenses': 'float'
}
attribute_map = {
'lookback_periods': 'lookback_periods',
'gross_annual_income': 'gross_annual_income',
'ratio_targets': 'ratio_targets',
'liquid_assets': 'liquid_assets',
'client_id': 'client_id',
'net_monthly_income': 'net_monthly_income',
'total_liabilities': 'total_liabilities',
'non_liquid_assets': 'non_liquid_assets',
'short_term_liabilities': 'short_term_liabilities',
'monthly_expenses': 'monthly_expenses'
}
def __init__(self, lookback_periods=3, gross_annual_income=None, ratio_targets=None, liquid_assets=None, client_id=None, net_monthly_income=None, total_liabilities=None, non_liquid_assets=None, short_term_liabilities=None, monthly_expenses=None): # noqa: E501
"""FinancialHealthCheckRequest - a model defined in Swagger""" # noqa: E501
self._lookback_periods = None
self._gross_annual_income = None
self._ratio_targets = None
self._liquid_assets = None
self._client_id = None
self._net_monthly_income = None
self._total_liabilities = None
self._non_liquid_assets = None
self._short_term_liabilities = None
self._monthly_expenses = None
self.discriminator = None
if lookback_periods is not None:
self.lookback_periods = lookback_periods
if gross_annual_income is not None:
self.gross_annual_income = gross_annual_income
if ratio_targets is not None:
self.ratio_targets = ratio_targets
if liquid_assets is not None:
self.liquid_assets = liquid_assets
if client_id is not None:
self.client_id = client_id
if net_monthly_income is not None:
self.net_monthly_income = net_monthly_income
if total_liabilities is not None:
self.total_liabilities = total_liabilities
if non_liquid_assets is not None:
self.non_liquid_assets = non_liquid_assets
if short_term_liabilities is not None:
self.short_term_liabilities = short_term_liabilities
if monthly_expenses is not None:
self.monthly_expenses = monthly_expenses
@property
def lookback_periods(self):
"""Gets the lookback_periods of this FinancialHealthCheckRequest. # noqa: E501
:return: The lookback_periods of this FinancialHealthCheckRequest. # noqa: E501
:rtype: int
"""
return self._lookback_periods
@lookback_periods.setter
def lookback_periods(self, lookback_periods):
"""Sets the lookback_periods of this FinancialHealthCheckRequest.
:param lookback_periods: The lookback_periods of this FinancialHealthCheckRequest. # noqa: E501
:type: int
"""
if lookback_periods is not None and lookback_periods < 1: # noqa: E501
raise ValueError("Invalid value for `lookback_periods`, must be a value greater than or equal to `1`") # noqa: E501
self._lookback_periods = lookback_periods
@property
def gross_annual_income(self):
"""Gets the gross_annual_income of this FinancialHealthCheckRequest. # noqa: E501
:return: The gross_annual_income of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._gross_annual_income
@gross_annual_income.setter
def gross_annual_income(self, gross_annual_income):
"""Sets the gross_annual_income of this FinancialHealthCheckRequest.
:param gross_annual_income: The gross_annual_income of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if gross_annual_income is not None and gross_annual_income < 0: # noqa: E501
raise ValueError("Invalid value for `gross_annual_income`, must be a value greater than or equal to `0`") # noqa: E501
self._gross_annual_income = gross_annual_income
@property
def ratio_targets(self):
"""Gets the ratio_targets of this FinancialHealthCheckRequest. # noqa: E501
:return: The ratio_targets of this FinancialHealthCheckRequest. # noqa: E501
:rtype: object
"""
return self._ratio_targets
@ratio_targets.setter
def ratio_targets(self, ratio_targets):
"""Sets the ratio_targets of this FinancialHealthCheckRequest.
:param ratio_targets: The ratio_targets of this FinancialHealthCheckRequest. # noqa: E501
:type: object
"""
self._ratio_targets = ratio_targets
@property
def liquid_assets(self):
"""Gets the liquid_assets of this FinancialHealthCheckRequest. # noqa: E501
:return: The liquid_assets of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._liquid_assets
@liquid_assets.setter
def liquid_assets(self, liquid_assets):
"""Sets the liquid_assets of this FinancialHealthCheckRequest.
:param liquid_assets: The liquid_assets of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if liquid_assets is not None and liquid_assets < 0: # noqa: E501
raise ValueError("Invalid value for `liquid_assets`, must be a value greater than or equal to `0`") # noqa: E501
self._liquid_assets = liquid_assets
@property
def client_id(self):
"""Gets the client_id of this FinancialHealthCheckRequest. # noqa: E501
:return: The client_id of this FinancialHealthCheckRequest. # noqa: E501
:rtype: str
"""
return self._client_id
@client_id.setter
def client_id(self, client_id):
"""Sets the client_id of this FinancialHealthCheckRequest.
:param client_id: The client_id of this FinancialHealthCheckRequest. # noqa: E501
:type: str
"""
self._client_id = client_id
@property
def net_monthly_income(self):
"""Gets the net_monthly_income of this FinancialHealthCheckRequest. # noqa: E501
:return: The net_monthly_income of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._net_monthly_income
@net_monthly_income.setter
def net_monthly_income(self, net_monthly_income):
"""Sets the net_monthly_income of this FinancialHealthCheckRequest.
:param net_monthly_income: The net_monthly_income of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if net_monthly_income is not None and net_monthly_income < 0: # noqa: E501
raise ValueError("Invalid value for `net_monthly_income`, must be a value greater than or equal to `0`") # noqa: E501
self._net_monthly_income = net_monthly_income
@property
def total_liabilities(self):
"""Gets the total_liabilities of this FinancialHealthCheckRequest. # noqa: E501
:return: The total_liabilities of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._total_liabilities
@total_liabilities.setter
def total_liabilities(self, total_liabilities):
"""Sets the total_liabilities of this FinancialHealthCheckRequest.
:param total_liabilities: The total_liabilities of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if total_liabilities is not None and total_liabilities < 0: # noqa: E501
raise ValueError("Invalid value for `total_liabilities`, must be a value greater than or equal to `0`") # noqa: E501
self._total_liabilities = total_liabilities
@property
def non_liquid_assets(self):
"""Gets the non_liquid_assets of this FinancialHealthCheckRequest. # noqa: E501
:return: The non_liquid_assets of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._non_liquid_assets
@non_liquid_assets.setter
def non_liquid_assets(self, non_liquid_assets):
"""Sets the non_liquid_assets of this FinancialHealthCheckRequest.
:param non_liquid_assets: The non_liquid_assets of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if non_liquid_assets is not None and non_liquid_assets < 0: # noqa: E501
raise ValueError("Invalid value for `non_liquid_assets`, must be a value greater than or equal to `0`") # noqa: E501
self._non_liquid_assets = non_liquid_assets
@property
def short_term_liabilities(self):
"""Gets the short_term_liabilities of this FinancialHealthCheckRequest. # noqa: E501
:return: The short_term_liabilities of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._short_term_liabilities
@short_term_liabilities.setter
def short_term_liabilities(self, short_term_liabilities):
"""Sets the short_term_liabilities of this FinancialHealthCheckRequest.
:param short_term_liabilities: The short_term_liabilities of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if short_term_liabilities is not None and short_term_liabilities < 0: # noqa: E501
raise ValueError("Invalid value for `short_term_liabilities`, must be a value greater than or equal to `0`") # noqa: E501
self._short_term_liabilities = short_term_liabilities
@property
def monthly_expenses(self):
"""Gets the monthly_expenses of this FinancialHealthCheckRequest. # noqa: E501
:return: The monthly_expenses of this FinancialHealthCheckRequest. # noqa: E501
:rtype: float
"""
return self._monthly_expenses
@monthly_expenses.setter
def monthly_expenses(self, monthly_expenses):
"""Sets the monthly_expenses of this FinancialHealthCheckRequest.
:param monthly_expenses: The monthly_expenses of this FinancialHealthCheckRequest. # noqa: E501
:type: float
"""
if monthly_expenses is not None and monthly_expenses < 0: # noqa: E501
raise ValueError("Invalid value for `monthly_expenses`, must be a value greater than or equal to `0`") # noqa: E501
self._monthly_expenses = monthly_expenses
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(FinancialHealthCheckRequest, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FinancialHealthCheckRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
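A brief usage sketch of the generated model above; the figures are illustrative only:

```python
req = FinancialHealthCheckRequest(
    gross_annual_income=85000.0,   # hypothetical values
    liquid_assets=12000.0,
    monthly_expenses=3100.0,
)
print(req.to_dict()["monthly_expenses"])   # 3100.0
# Validation happens in the property setters, e.g. assigning a negative value
# to liquid_assets raises ValueError.
```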
avg_line_length: 35.54918 | max_line_length: 264 | alphanum_fraction: 0.65998
hexsha: 8c362d0ad94d5292fd40b744fe74552c8049b218 | size: 863 | ext: py | lang: Python
max_stars: project/utils/ctrl_utils.py @ vinibiavatti1/PythonFlaskCms (head e43a4db84d1f77a5f66b1f8fcb9dc96e05e6c023) | licenses: ["MIT"] | count: null | events: null
max_issues: project/utils/ctrl_utils.py @ vinibiavatti1/PythonFlaskCms (head e43a4db84d1f77a5f66b1f8fcb9dc96e05e6c023) | licenses: ["MIT"] | count: null | events: null
max_forks: project/utils/ctrl_utils.py @ vinibiavatti1/PythonFlaskCms (head e43a4db84d1f77a5f66b1f8fcb9dc96e05e6c023) | licenses: ["MIT"] | count: null | events: null
content:
"""
Controller utilities.
"""
from werkzeug import utils
from typing import Any
from project.models.property_model import PropertyModel
def escape_dict(data: dict[str, Any]) -> dict[str, Any]:
"""
Escape all fields of dict.
"""
return {k: utils.escape(v) for k, v in data.items()}
def escape(value: Any) -> str:
"""
Alias to werkzeug::utils::escape.
"""
return utils.escape(value)
def result_api_message(type: str, message: str) -> dict[str, Any]:
"""
Generates a common dict response message with type and message.
"""
return {
'type': type,
'message': message
}
def generate_admin_url(context: str, *sections: str) -> str:
"""
Get root url.
"""
url = f'/{context}/admin'
for section in sections:
if section:
url += f'/{section}'
return url
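A quick sketch of two of the helpers above; the inputs are made up:

```python
print(generate_admin_url("site", "pages", "", "edit"))   # -> /site/admin/pages/edit (empty sections are skipped)
print(result_api_message("error", "Not found"))          # -> {'type': 'error', 'message': 'Not found'}
```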
avg_line_length: 20.069767 | max_line_length: 67 | alphanum_fraction: 0.604867
hexsha: dc8fa794ae7e24d227f6fc22cd43055cca2ac0a5 | size: 20,403 | ext: py | lang: Python
max_stars: utils/00_YOLO_100_attack_10_area.py @ zzj403/mmd_dap (head 14c0dddc822176f4e92b843f1c083512297b3db4) | licenses: ["Apache-2.0"] | count: null | events: null
max_issues: utils/00_YOLO_100_attack_10_area.py @ zzj403/mmd_dap (head 14c0dddc822176f4e92b843f1c083512297b3db4) | licenses: ["Apache-2.0"] | count: null | events: null
max_forks: utils/00_YOLO_100_attack_10_area.py @ zzj403/mmd_dap (head 14c0dddc822176f4e92b843f1c083512297b3db4) | licenses: ["Apache-2.0"] | count: null | events: null
content:
"""
Training code for Adversarial patch training using Faster RCNN based on mmdetection
Redo UPC in PyTorch
sp_lr = 0.3
pop = 300
random texture
evo_step_num = 40
"""
import PIL
import cv2
# from load_data import *
import copy
from tqdm import tqdm
from mmdet import __version__
from mmdet.apis import init_detector, inference_detector, show_result_pyplot, get_Image_ready
import mmcv
from mmcv.ops import RoIAlign, RoIPool
from mmcv.parallel import collate, scatter
from mmcv.runner import load_checkpoint
import torch
import gc
import matplotlib.pyplot as plt
from torch import autograd
from torchvision import transforms
import subprocess
# from utils.utils import *
import numpy as np
import pickle
# import patch_config as patch_config
import sys
import time
# from brambox.io.parser.annotation import DarknetParser as anno_darknet_parse
# from utils import *
from skimage.segmentation import slic
from skimage.util import img_as_float
from skimage import io
from skimage.segmentation import mark_boundaries
import os
from PIL import Image
import torch.optim as optim
import torch.nn.functional as F
import skimage.io as io
# from train_patch_frcn_measure_no_stop_gray_step_3_0920 import measure_region_with_attack
import math
from dataset.coco_train_1000_PERSON import CocoTrainPerson
from dataset.sp_geter_dataset import SuperPixelGet
from torch.utils.data import DataLoader,Dataset
from utils.tv_loss import TVLoss
from utils.iou import compute_iou_tensor
from get_convex_env import get_conv_envl
seed = 2020
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
csv_name = 'x_result2.csv'
torch.cuda.set_device(0)
def gkern(kernlen=21, nsig=3):
"""Returns a 2D Gaussian kernel array."""
import scipy.stats as st
x = np.linspace(-nsig, nsig, kernlen)
kern1d = st.norm.pdf(x)
kernel_raw = np.outer(kern1d, kern1d)
kernel = kernel_raw / kernel_raw.sum()
return kernel
class PatchTrainer(object):
def __init__(self, mode):
self.config_file = './configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
self.checkpoint_file = '../common_data/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'
self.Faster_RCNN = init_detector(self.config_file, self.checkpoint_file, device='cpu').cuda()
self.yolo_config_file = './configs/yolo/yolov3_d53_mstrain-416_273e_coco.py'
self.yolo_checkpoint_file = '../common_data/yolov3_d53_mstrain-416_273e_coco-2b60fcd9.pth'
self.YOLOv3 = init_detector(self.yolo_config_file, self.yolo_checkpoint_file, device='cpu').cuda()
self.mean = torch.Tensor([0, 0, 0]).unsqueeze(0).unsqueeze(-1).unsqueeze(-1).cuda()
self.std = torch.Tensor([255., 255., 255.]).unsqueeze(0).unsqueeze(-1).unsqueeze(-1).cuda()
def Transform_Patch(self, patch):
# clamp_patch = torch.clamp(patch, 0.01, 254.99)
clamp_patch = patch
unsqueezed_patch = clamp_patch.unsqueeze(0)
resized_patch = F.interpolate(unsqueezed_patch, (800, 800), mode='bilinear').cuda()
normalized_patch = (resized_patch - self.mean) / self.std
return normalized_patch
def Transform_Patch_batch(self, patch):
clamp_patch = torch.clamp(patch, 0.01, 254.99)
resized_patch = F.interpolate(clamp_patch, (800, 800), mode='bilinear').cuda()
normalized_patch = (resized_patch - self.mean) / self.std
return normalized_patch
def train(self):
"""
Optimize a patch to generate an adversarial example.
:return: Nothing
"""
img_size = 800
batch_size = 1
n_t_op_steps = 5000
max_lab = 14
ATTACK_TASK = 'target'
# TARGET_CLASS = 'dog'
TARGET_CLASS = 16
# ATTACK_TASK = 'untarget'
time_str = time.strftime("%Y%m%d-%H%M%S")
conv_size = 2
kernel1 = gkern(2*conv_size+1, 3).astype(np.float32)
stack_kernel1 = np.stack([kernel1, kernel1, kernel1]).swapaxes(2, 0)
stack_kernel1 = np.expand_dims(stack_kernel1, 3)
stack_kernel1 = torch.from_numpy(stack_kernel1).permute(2,3,0,1).float()
# Dataset prepare
data_obj = CocoTrainPerson(dataType='train2017',num_use=100)
dataloader_obj = DataLoader(data_obj, batch_size=1, shuffle=False)  # load the data with a DataLoader
# img info prepare
img_frcn = get_Image_ready(self.Faster_RCNN, '1016.png')
img_frcn['img_metas'][0][0]['filename'] = None
img_frcn['img_metas'][0][0]['ori_filename'] = None
img_frcn['img_metas'][0][0]['ori_shape'] = None
img_frcn['img_metas'][0][0]['pad_shape'] = None
img_frcn['img_metas'][0][0]['scale_factor'] = None
# attack_area_rate = 0.2
ATTACK_AREA_RATE = 0.1
decay_t_op_step = 100
batch_size_sp = 3
population_num = 300 # 36
optim_step_num = 300
k = 0
for i_batch, batch_data in enumerate(dataloader_obj):
img, mask, bbox, class_label = batch_data[0][0], batch_data[1][0], batch_data[2][0], batch_data[3][0]
# img : 3,500,500
# mask : 500,500
# bbox : x1,y1,w,h
# class_label : tensor[]
img_name = batch_data[4][0]
mask_area = torch.sum(mask)
# if img_name.split('_')[0] != '000000001815':
# continue
print('---------------')
print(img_name)
print('---------------')
# use segment SLIC
base_SLIC_seed_num = 3000
img_np = img.numpy().transpose(1,2,0)
mask_np = mask.numpy()
numSegments = int(base_SLIC_seed_num/(500*500)*torch.sum(mask))
segments_np = slic(image=img_np, n_segments=numSegments, sigma=0, slic_zero=True, mask=mask_np)
segments_tensor = torch.from_numpy(segments_np).float().cuda()
segments_label = torch.unique(segments_tensor)
segments_label = segments_label[1:]
# define theta_m
# pay attention to the center and the boundary
# (0) prepare stack of sp
# (1) find the center sp
# (2) find the boundary sp
# # (0) prepare stack of sp
zero_layer = torch.zeros_like(segments_tensor)
one_layer = torch.ones_like(segments_tensor)
# segments_stack = torch.stack([torch.where(segments_tensor==segments_label[j], segments_tensor, zero_layer) for j in range(segments_label.shape[0])], dim=0)
# # (1) find the center sp
bbox_x1 = bbox[0]
bbox_y1 = bbox[1]
bbox_w = bbox[2]
bbox_h = bbox[3]
bbox_x_c = bbox_x1 + bbox_w/2
bbox_y_c = bbox_y1 + bbox_h/2
bbox_x_c_int = int(bbox_x_c)
bbox_y_c_int = int(bbox_y_c)
# 3 load attack region
load_patch_dir = '../common_data/NES_search_test_1107/'+img_name.split('_')[0]
load_patch_list = os.listdir(load_patch_dir)
load_patch_list.sort()
wat_num_max = 0
for i_name in load_patch_list:
wat_num = int(i_name.split('_')[0])
if wat_num > wat_num_max:
wat_num_max = wat_num
for i_name in load_patch_list:
wat_num = int(i_name.split('_')[0])
if wat_num == wat_num_max:
max_name = i_name
break
load_patch = os.path.join(load_patch_dir, max_name)
load_img = Image.open(load_patch).convert('RGB')
load_img = transforms.ToTensor()(load_img)
region_mask = 2*load_img - img.cpu()
region_mask = torch.sum(region_mask,dim=0)/3
region_mask = torch.where(mask>0, region_mask,torch.zeros_like(region_mask))
attack_region_tmp_pil = transforms.ToPILImage()(region_mask.cpu())
attack_region_tmp_pil.save('013k.png')
# process mask
region_mask_new = torch.zeros_like(region_mask).cuda()
for i in range(segments_label.shape[0]):
sp = segments_label[i]
right_color = (torch.where(segments_tensor==sp,region_mask.cuda(),one_layer*(-10))).cpu()
right_color = torch.mean(right_color[right_color!=-10])
color_layer = torch.ones_like(segments_tensor).fill_(right_color)
region_mask_new = torch.where(segments_tensor==sp, color_layer, region_mask_new)
region_mask_new = region_mask_new
region_mask = region_mask_new
region_mask_unique = torch.unique(region_mask)
for i in range(region_mask_unique.shape[0]):
thres = region_mask_unique[i]
# region_mask_tmp = torch.zeros_like(region_mask)
region_mask_tmp = torch.where(region_mask>thres, one_layer, zero_layer)
pixel_num = torch.sum(region_mask_tmp)
if pixel_num < mask_area * ATTACK_AREA_RATE:
break
attack_region_search_top = region_mask_tmp
attack_region_search_top = get_conv_envl(attack_region_search_top)
attack_region_tmp = attack_region_search_top
attack_region_tmp = attack_region_tmp.cuda()
print('---------------')
print('You have used ', float(torch.sum(attack_region_tmp)/mask_area), 'area.')
print('---------------')
## start at gray
adv_patch_w = torch.zeros(3,500,500).cuda()
adv_patch_w.requires_grad_(True)
optimizer = optim.Adam([
{'params': adv_patch_w, 'lr': 0.1}
], amsgrad=True)
t_op_num = 800
min_max_iou_record = 1
for t_op_step in range(t_op_num):
adv_patch = torch.sigmoid(adv_patch_w)
patched_img = torch.where(attack_region_tmp>0, adv_patch, img.cuda()).unsqueeze(0)
patched_img_255 = patched_img * 255.
patched_img_rsz = F.interpolate(patched_img_255, (416, 416), mode='bilinear').cuda()
patched_img_nom_rsz = (patched_img_rsz - self.mean) / self.std
batch_size_now = patched_img_255.shape[0]
# output
img_new = copy.deepcopy(img_frcn)
img_new['img'][0] = patched_img_nom_rsz
yolo_output = self.YOLOv3(return_loss=False, rescale=False, **img_new)
# output formate is [x1,y1,x2,y2]
# anaylize yolo_output [batch_size]
# [
# ( multi_lvl_bboxes, multi_lvl_cls_scores, multi_lvl_conf_scores )
# multi_lvl_bboxes [ 3 layers ]
# [ [0] 1875, 4
# [1] 7500, 4
# [2] 30000,4 ]
#
# multi_lvl_cls_scores
# [ [0] 1875, 80
# [1] 7500, 80
# [2] 30000,80 ]
#
# multi_lvl_conf_scores
# [ [0] 1875
# [1] 7500
# [2] 30000 ]
# * batch_size
# ]
# merge yolo output
multi_lvl_bboxes_batch = []
multi_lvl_cls_scores_batch = []
multi_lvl_conf_scores_batch = []
for i_b in range(batch_size_now):
multi_lvl_bboxes_batch += yolo_output[i_b][0]
multi_lvl_cls_scores_batch += yolo_output[i_b][1]
multi_lvl_conf_scores_batch += yolo_output[i_b][2]
multi_lvl_bboxes_batch = torch.cat(multi_lvl_bboxes_batch, dim=0)
multi_lvl_cls_scores_batch = torch.cat(multi_lvl_cls_scores_batch, dim=0)
multi_lvl_conf_scores_batch = torch.cat(multi_lvl_conf_scores_batch, dim=0)
# objectness loss
objectness_loss = torch.sum(multi_lvl_conf_scores_batch[multi_lvl_conf_scores_batch>0.05])
# class loss
attack_class_score = multi_lvl_cls_scores_batch[:,class_label]
# attack_class_score = attack_class_score[attack_class_score>0.5]
attack_class_score = torch.sort(attack_class_score, descending=True)[0][:30]
cls_loss = torch.sum(attack_class_score)
# target class loss
attack_class_score_target = multi_lvl_cls_scores_batch[:,16]
attack_class_score_target = attack_class_score_target[multi_lvl_conf_scores_batch>0.5]
attack_class_score_target = attack_class_score_target[attack_class_score_target<0.9]
attack_class_score_target = torch.sort(attack_class_score_target, descending=True)[0][:30]
cls_target_loss = - torch.sum(attack_class_score_target)
# iou loss
bbox_x1 = bbox[0]/500*416
bbox_y1 = bbox[1]/500*416
bbox_w = bbox[2]/500*416
bbox_h = bbox[3]/500*416
ground_truth_bbox = [bbox_x1, bbox_y1, bbox_x1+bbox_w, bbox_y1 + bbox_h]
ground_truth_bbox = torch.Tensor(ground_truth_bbox).unsqueeze(0).cuda()
iou_all = compute_iou_tensor(multi_lvl_bboxes_batch, ground_truth_bbox)
iou_positive = iou_all[iou_all>0.05]
iou_loss = torch.sum(iou_all)
# class loss selected by IoU
attack_class_score = multi_lvl_cls_scores_batch[:,class_label]
attack_class_score_iou = attack_class_score[iou_all>0.05]
attack_class_score_iou_sort = torch.sort(attack_class_score_iou, descending=True)[0][:30]
cls_iou_loss = torch.sum(attack_class_score_iou_sort)
# rpn loss
# : to make every proposal smaller to its center
rpn_ctx = (multi_lvl_bboxes_batch[:,0] + multi_lvl_bboxes_batch[:,2])/2
rpn_cty = (multi_lvl_bboxes_batch[:,1] + multi_lvl_bboxes_batch[:,3])/2
rpn_box = multi_lvl_bboxes_batch[:,:4]
rpn_ctx = rpn_ctx.unsqueeze(-1)
rpn_cty = rpn_cty.unsqueeze(-1)
rpn_box_target = torch.cat([rpn_ctx,rpn_cty,rpn_ctx,rpn_cty], dim=-1)
rpn_loss = l1_norm(multi_lvl_conf_scores_batch.unsqueeze(-1).repeat(1,4)*(multi_lvl_bboxes_batch - rpn_box_target))
# total_loss = cls_loss + objectness_loss + rpn_loss + cls_target_loss + cls_iou_loss
# total_loss = cls_target_loss*100 + cls_iou_loss*100 #+ rpn_loss
total_loss = cls_iou_loss*100 + rpn_loss
total_loss.backward()
optimizer.step()
optimizer.zero_grad()
# ----------------------------------
# ------------------------
# early stop
if t_op_step % 30 == 0:
print( t_op_step,
'iou', float(torch.max(iou_all)),
'cls', float(torch.max(attack_class_score)),
'obj', float(torch.max(multi_lvl_conf_scores_batch)))
#test
patched_img_cpu = patched_img.cpu().squeeze()
test_confidence_threshold = 0.45
iou_max = torch.max(iou_all)
if iou_max < 0.05 or torch.max(multi_lvl_conf_scores_batch) < 0.45:
print('Break at',t_op_step,'iou final max:', torch.max(iou_all))
# save image
patched_img_cpu_pil = transforms.ToPILImage()(patched_img_cpu)
out_file_path = os.path.join('../common_data/NES_attack/YOLO3/success'+str(int(ATTACK_AREA_RATE*100)), img_name)
patched_img_cpu_pil.save(out_file_path)
break
# report
max_iou = torch.max(iou_all)
if max_iou < min_max_iou_record:
min_max_iou_record = max_iou
txt_save_dir = '../common_data/NES_attack/YOLO3/iou'+str(int(ATTACK_AREA_RATE*100))
txt_save_path = os.path.join(txt_save_dir, img_name.split('.')[0]+'.txt')
with open(txt_save_path,'w') as f:
text = str(float(max_iou))
f.write(text)
if t_op_step % 100 == 0:
iou_sort = torch.sort(iou_all,descending=True)[0][:6].detach().clone().cpu()
print(t_op_step, 'iou t-cls :', max_iou)
print()
# ------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------
def generate_patch(self, type):
"""
Generate a random patch as a starting point for optimization.
:param type: Can be 'gray' or 'random'. Whether or not generate a gray or a random patch.
:return:
"""
if type == 'gray':
adv_patch_cpu = torch.full((3, 500, 500), 0.5)
elif type == 'random':
adv_patch_cpu = torch.rand((3, 500, 500))
if type == 'trained_patch':
patchfile = 'patches/object_score.png'
patch_img = Image.open(patchfile).convert('RGB')
patch_size = self.config.patch_size
tf = transforms.Resize((patch_size, patch_size))
patch_img = tf(patch_img)
tf = transforms.ToTensor()
adv_patch_cpu = tf(patch_img)
return adv_patch_cpu
def read_image(self, path):
"""
Read an input image to be used as a patch
:param path: Path to the image to be read.
:return: Returns the transformed patch as a pytorch Tensor.
"""
patch_img = Image.open(path).convert('RGB')
tf = transforms.Resize((self.config.patch_size, self.config.patch_size))
patch_img = tf(patch_img)
tf = transforms.ToTensor()
adv_patch_cpu = tf(patch_img)
return adv_patch_cpu
def connected_domin_detect(input_img):
from skimage import measure
# detection
if input_img.shape[0] == 3:
input_img_new = (input_img[0] + input_img[1] + input_img[2])
else:
input_img_new = input_img
ones = torch.Tensor(input_img_new.size()).fill_(1)
zeros = torch.Tensor(input_img_new.size()).fill_(0)
input_map_new = torch.where((input_img_new != 0), ones, zeros)
# img = transforms.ToPILImage()(input_map_new.detach().cpu())
# img.show()
input_map_new = input_map_new.cpu()
labels = measure.label(input_map_new[:, :], background=0, connectivity=2)
label_max_number = np.max(labels)
return float(label_max_number)
def get_obj_min_score(boxes):
if type(boxes[0][0]) is list:
min_score_list = []
for i in range(len(boxes)):
score_list = []
for j in range(len(boxes[i])):
score_list.append(boxes[i][j][4])
min_score_list.append(min(score_list))
return np.array(min_score_list)
else:
score_list = []
for j in range(len(boxes)):
score_list.append(boxes[j][4])
return np.array(min(score_list))
def l2_norm(tensor):
return torch.sqrt(torch.sum(torch.pow(tensor,2)))
def l1_norm(tensor):
return torch.sum(torch.abs(tensor))
def main():
trainer = PatchTrainer('paper_obj')
trainer.train()
if __name__ == '__main__':
main()
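A small, self-contained check of the pure helpers defined in the script above (the detector and attack pipeline itself needs the mmdetection checkpoints and COCO data, so it is not exercised here):

```python
import numpy as np
import torch

k = gkern(kernlen=5, nsig=3)
print(k.shape, round(float(k.sum()), 6))        # (5, 5) 1.0 -- the Gaussian kernel is normalised
t = torch.tensor([[3.0, -4.0]])
print(float(l2_norm(t)), float(l1_norm(t)))     # 5.0 7.0
```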
avg_line_length: 37.505515 | max_line_length: 169 | alphanum_fraction: 0.570945
hexsha: 4f29b5781123726a60bbcb4b66b599aa0843717a | size: 4,829 | ext: py | lang: Python
max_stars: checks_available/check_hbase_regionserver.py @ net-angels/puypuy (head 23fb56f519989f899a7a3aeee5a6d403afee4809) | licenses: ["Apache-2.0"] | count: 1 | events: 2017-05-04T08:43:12.000Z – 2017-05-04T08:43:12.000Z
max_issues: checks_available/check_hbase_regionserver.py @ net-angels/puypuy (head 23fb56f519989f899a7a3aeee5a6d403afee4809) | licenses: ["Apache-2.0"] | count: null | events: null
max_forks: checks_available/check_hbase_regionserver.py @ net-angels/puypuy (head 23fb56f519989f899a7a3aeee5a6d403afee4809) | licenses: ["Apache-2.0"] | count: null | events: null
content:
import lib.record_rate
import lib.commonclient
import lib.puylogger
import lib.getconfig
import datetime
import json

hbase_region_url = lib.getconfig.getparam('HBase-Region', 'jmx')
cluster_name = lib.getconfig.getparam('SelfConfig', 'cluster_name')
check_type = 'hbase'


def runcheck():
    local_vars = []
    try:
        stats_json = json.loads(lib.commonclient.httpget(__name__, hbase_region_url))
        stats_keys = stats_json['beans']
        node_rated_keys = ('totalRequestCount', 'readRequestCount', 'writeRequestCount', 'Delete_num_ops', 'Mutate_num_ops', 'FlushTime_num_ops',
                           'GcTimeMillis', 'compactedCellsCount', 'majorCompactedCellsCount', 'compactedCellsSize', 'majorCompactedCellsSize',
                           'blockCacheHitCount', 'blockCacheMissCount', 'blockCacheEvictionCount')
        node_stuck_keys = ('GcCount', 'HeapMemoryUsage', 'OpenFileDescriptorCount', 'blockCacheCount')
        rate = lib.record_rate.ValueRate()
        timestamp = int(datetime.datetime.now().strftime("%s"))
        for stats_x in range(0, len(stats_keys)):
            for k, v in enumerate(('java.lang:type=GarbageCollector,name=ConcurrentMarkSweep', 'java.lang:type=GarbageCollector,name=ParNew')):
                if v in stats_keys[stats_x]['name']:
                    if k == 0:
                        cms_key = 'hregion_heap_cms_lastgcinfo'
                        cms_value = stats_keys[stats_x]['LastGcInfo']['duration']
                        local_vars.append({'name': cms_key, 'timestamp': timestamp, 'value': cms_value, 'check_type': check_type})
                    if k == 1:
                        parnew_key = 'hregion_heap_parnew_lastgcinfo'
                        parnew_value = stats_keys[stats_x]['LastGcInfo']['duration']
                        local_vars.append({'name': parnew_key, 'timestamp': timestamp, 'value': parnew_value, 'check_type': check_type})
        for stats_x in range(0, len(stats_keys)):
            for k, v in enumerate(('java.lang:type=GarbageCollector,name=G1 Young Generation', 'java.lang:type=GarbageCollector,name=G1 Old Generation')):
                if v in stats_keys[stats_x]['name']:
                    if k == 0:
                        g1_young_key = 'hregion_heap_g1_young_lastgcinfo'
                        g1_young_value = stats_keys[stats_x]['LastGcInfo']['duration']
                        local_vars.append({'name': g1_young_key, 'timestamp': timestamp, 'value': g1_young_value, 'check_type': check_type})
                    if k == 1:
                        if stats_keys[stats_x]['LastGcInfo'] is not None:
                            g1_old_key = 'hregion_heap_g1_old_lastgcinfo'
                            g1_old_value = stats_keys[stats_x]['LastGcInfo']['duration']
                            local_vars.append({'name': g1_old_key, 'timestamp': timestamp, 'value': g1_old_value, 'check_type': check_type})
                        else:
                            g1_old_key = 'hregion_heap_g1_old_lastgcinfo'
                            g1_old_value = 0
                            local_vars.append({'name': g1_old_key, 'timestamp': timestamp, 'value': g1_old_value, 'check_type': check_type})
        for stats_index in range(0, len(stats_keys)):
            for values in node_rated_keys:
                if values in stats_keys[stats_index]:
                    if values in node_rated_keys:
                        myvalue = stats_keys[stats_index][values]
                        values_rate = rate.record_value_rate('hregion_' + values, myvalue, timestamp)
                        if values_rate >= 0:
                            local_vars.append({'name': 'hregion_node_' + values.lower(), 'timestamp': timestamp, 'value': values_rate, 'check_type': check_type, 'chart_type': 'Rate'})
            for values in node_stuck_keys:
                if values in stats_keys[stats_index]:
                    if values == 'HeapMemoryUsage':
                        heap_metrics = ('max', 'init', 'committed', 'used')
                        for heap_values in heap_metrics:
                            local_vars.append({'name': 'hregion_heap_' + heap_values.lower(), 'timestamp': timestamp, 'value': stats_keys[stats_index][values][heap_values], 'check_type': check_type})
                    elif values == 'GcCount':
                        local_vars.append({'name': 'hregion_node_' + values.lower(), 'timestamp': timestamp, 'value': stats_keys[stats_index][values], 'check_type': check_type, 'reaction': -3})
                    else:
                        local_vars.append({'name': 'hregion_node_' + values.lower(), 'timestamp': timestamp, 'value': stats_keys[stats_index][values], 'check_type': check_type})
        return local_vars
    except Exception as e:
        lib.puylogger.print_message(__name__ + ' Error : ' + str(e))
        pass
avg_line_length: 61.910256 | max_line_length: 197 | alphanum_fraction: 0.601988
hexsha: d2e0188d0b1cbdba1774f1b56ff44ebc483d53ff | size: 964 | ext: py | lang: Python
max_stars: backend/agenda/models.py @ cruznicollas/Medicar (head 50733191056cffd76b12db804d9aaaea4a5ef8c8) | licenses: ["MIT"] | count: 1 | events: 2021-08-16T12:16:07.000Z – 2021-08-16T12:16:07.000Z
max_issues: backend/agenda/models.py @ cruznicollas/Medicar (head 50733191056cffd76b12db804d9aaaea4a5ef8c8) | licenses: ["MIT"] | count: null | events: null
max_forks: backend/agenda/models.py @ cruznicollas/Medicar (head 50733191056cffd76b12db804d9aaaea4a5ef8c8) | licenses: ["MIT"] | count: null | events: null
content:
from django.db import models
from medico.models import Medico


# Create your models here.
class Horario(models.Model):
    horario = models.TimeField(auto_now=False, auto_now_add=False)

    def __str__(self):
        return f'{self.horario}'


class Agenda(models.Model):
    """
    Schedule (agenda) model for a doctor (médico).
    """
    id = models.AutoField(primary_key=True)
    medico = models.ForeignKey(
        Medico,
        on_delete=models.PROTECT,
        help_text='Selecione um médico')
    dia = models.DateField()
    horario = models.ManyToManyField(
        Horario, help_text='Escolha os Horários')

    def horarios(self):
        horarios = []
        for p in self.horario.all():
            horarios.append(p)
        return horarios

    class Meta:
        unique_together = ('medico', 'dia')
        verbose_name = "Agenda"
        verbose_name_plural = "Agendas"
        ordering = ['dia']

    def __str__(self):
        return f'{self.medico}'
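A hedged sketch of how the models above could be exercised from a Django shell; it is left commented out because it needs the project's database and at least one `Medico` row:

```python
# from datetime import date, time
# medico = Medico.objects.first()                                      # assumes a doctor already exists
# agenda = Agenda.objects.create(medico=medico, dia=date(2021, 9, 1))
# agenda.horario.add(Horario.objects.create(horario=time(14, 0)))
# agenda.horarios()   # -> [<Horario: 14:00:00>]
# A second Agenda for the same medico and dia would violate unique_together.
```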
avg_line_length: 24.717949 | max_line_length: 66 | alphanum_fraction: 0.626556
hexsha: caf132e1f35d0903a18f7899a60dba58308fb9ef | size: 13,445 | ext: py | lang: Python
max_stars: Retinexdip.py @ zhaozunjin/RetinexDIP (head 05ab78b725a5c9a6a014a38bfdb2778c7535718b) | licenses: ["MIT"] | count: 16 | events: 2021-06-28T03:18:53.000Z – 2022-03-27T11:30:24.000Z
max_issues: Retinexdip.py @ zhaozunjin/RetinexDIP (head 05ab78b725a5c9a6a014a38bfdb2778c7535718b) | licenses: ["MIT"] | count: 1 | events: 2022-03-23T03:33:39.000Z – 2022-03-23T03:40:42.000Z
max_forks: Retinexdip.py @ zhaozunjin/RetinexDIP (head 05ab78b725a5c9a6a014a38bfdb2778c7535718b) | licenses: ["MIT"] | count: 7 | events: 2021-09-24T11:02:27.000Z – 2022-03-27T11:30:25.000Z
content:
from collections import namedtuple
from net import *
from net.downsampler import *
from net.losses import StdLoss, GradientLoss, ExtendedL1Loss, GrayLoss
from net.losses import ExclusionLoss, TVLoss
from net.noise import get_noise
import matplotlib.pyplot as plt
from PIL import Image
from skimage import exposure,color
import numpy as np
import math
import torch
import torchvision
import cv2
from scipy import misc
from torchvision import transforms
from utils.vggloss import VGG16
from utils.sgld import SGLD
import argparse
from glob import glob
import os
import time
parser = argparse.ArgumentParser()
parser.add_argument("--input", "-i", type=str, default='data/Test', help='test image folder')
parser.add_argument("--result", "-r", type=str, default='./result', help='result folder')
arg = parser.parse_args()
EnhancementResult = namedtuple("EnhancementResult", ['reflection', 'illumination'])
torch.manual_seed(0)
torch.cuda.manual_seed(0)
torch.cuda.manual_seed_all(0)
class Enhancement(object):
def __init__(self, image_name, image, plot_during_training=True, show_every=10, num_iter=300):
self.image = image
self.img = image
self.size = image.size
self.image_np = None
self.images_torch = None
self.plot_during_training = plot_during_training
# self.ratio = ratio
self.psnrs = []
self.show_every = show_every
self.image_name = image_name
self.num_iter = num_iter
self.loss_function = None
# self.ratio_net = None
self.parameters = None
self.learning_rate = 0.01
self.input_depth = 3 # This value could affect the performance. 3 is ok for natural image, if your
#images are extremely dark, you may consider 8 for the value.
self.data_type = torch.cuda.FloatTensor
# self.data_type = torch.FloatTensor
self.reflection_net_inputs = None
self.illumination_net_inputs = None
self.original_illumination = None
self.original_reflection = None
self.reflection_net = None
self.illumination_net = None
self.total_loss = None
self.reflection_out = None
self.illumination_out = None
self.current_result = None
self.best_result = None
self._init_all()
def _init_all(self):
self._init_images()
self._init_decomposition()
self._init_nets()
self._init_inputs()
self._init_parameters()
self._init_losses()
def _maxRGB(self):
'''
self.image: pil image, input low-light image
:return: np, initial illumnation
'''
(R, G, B) = self.image.split()
I_0 = np.array(np.maximum(np.maximum(R, G), B))
return I_0
def _init_decomposition(self):
temp = self._maxRGB() # numpy
# get initial illumination map
self.original_illumination = np.clip(np.asarray([temp for _ in range(3)]),1,255)/255
# self.original_illumination = np.clip(temp,1, 255) / 255
# get initial reflection
self.original_reflection = self.image_np / self.original_illumination
self.original_illumination = np_to_torch(self.original_illumination).type(self.data_type)
self.original_reflection = np_to_torch(np.asarray(self.original_reflection)).type(self.data_type)
# print(self.original_reflection.shape)namedtuple
# print(self.original_illumination.shape)
def _init_images(self):
#self.images = create_augmentations(self.image)
# self.images_torch = [np_to_torch(image).type(torch.cuda.FloatTensor) for image in self.images]
self.image =transforms.Resize((512,512))(self.image)
self.image_np = pil_to_np(self.image) # pil image to numpy
self.image_torch = np_to_torch(self.image_np).type(self.data_type)
# print(self.size)
# print((self.image_torch.shape[2],self.image_torch.shape[3]))
def _init_inputs(self):
if self.image_torch is not None:
size = (self.image_torch.shape[2], self.image_torch.shape[3])
# print(size)
input_type = 'noise'
# input_type = 'meshgrid'
self.reflection_net_inputs = get_noise(self.input_depth,
input_type, size).type(self.data_type).detach()
# misc.imsave('out/input_illumination.png',
# misc.imresize(torch_to_np(self.reflection_net_inputs).transpose(1, 2, 0),(self.size[1],self.size[0])))
self.illumination_net_inputs = get_noise(self.input_depth,
input_type, size).type(self.data_type).detach()
def _init_parameters(self):
self.parameters = [p for p in self.reflection_net.parameters()] + \
[p for p in self.illumination_net.parameters()]
def weight_init(self, m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0.0, 0.5 * math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif classname.find('BatchNorm') != -1:
m.weight.data.fill_(1)
m.bias.data.zero_()
elif classname.find('Linear') != -1:
n = m.weight.size(1)
m.weight.data.normal_(0, 0.01)
m.bias.data = torch.ones(m.bias.data.size())
def _init_nets(self):
pad = 'zero'
self.reflection_net = skip(self.input_depth, 3,
num_channels_down = [8, 16, 32, 64,128],
num_channels_up = [8, 16, 32, 64,128],
num_channels_skip = [0, 0, 0, 0, 0],
filter_size_down = 3, filter_size_up = 3, filter_skip_size=1,
upsample_mode='bilinear',
downsample_mode='avg',
need_sigmoid=True, need_bias=True, pad=pad)
self.reflection_net.apply(self.weight_init).type(self.data_type)
self.illumination_net = skip(self.input_depth, 3,
num_channels_down = [8, 16, 32, 64],
num_channels_up = [8, 16, 32, 64],
num_channels_skip = [0, 0, 0, 0],
filter_size_down = 3, filter_size_up = 3, filter_skip_size=1,
upsample_mode='bilinear',
downsample_mode='avg',
need_sigmoid=True, need_bias=True, pad=pad)
self.illumination_net.apply(self.weight_init).type(self.data_type)
def _init_losses(self):
self.l1_loss = nn.SmoothL1Loss().type(self.data_type) # for illumination
self.mse_loss = nn.MSELoss().type(self.data_type) # for reflection and reconstruction
self.exclusion_loss = ExclusionLoss().type(self.data_type)
self.tv_loss = TVLoss().type(self.data_type)
self.gradient_loss = GradientLoss().type(self.data_type)
def optimize(self):
# torch.backends.cudnn.enabled = True
# torch.backends.cudnn.benchmark = True
# optimizer = SGLD(self.parameters, lr=self.learning_rate)
optimizer = torch.optim.Adam(self.parameters, lr=self.learning_rate)
print("Processing: {}".format(self.image_name.split("/")[-1]))
start = time.time()
for j in range(self.num_iter):
optimizer.zero_grad()
self._optimization_closure(500,499)
if j==499:
self._obtain_current_result(499)
if self.plot_during_training:
self._plot_closure(j)
optimizer.step()
end = time.time()
print("time:%.4f"%(end-start))
cv2.imwrite(self.image_name, self.best_result)
def _get_augmentation(self, iteration):
if iteration % 2 == 1:
return 0
# return 0
iteration //= 2
return iteration % 8
def _optimization_closure(self, num_iter, step):
reg_noise_std = 1 / 10000.
aug = self._get_augmentation(step)
if step == num_iter - 1:
aug = 0
illumination_net_input = self.illumination_net_inputs + \
(self.illumination_net_inputs.clone().normal_() * reg_noise_std)
reflection_net_input = self.reflection_net_inputs + \
(self.reflection_net_inputs.clone().normal_() * reg_noise_std)
self.illumination_out = self.illumination_net(illumination_net_input)
self.reflection_out = self.reflection_net(reflection_net_input)
# weighted with the gradient of latent reflectance
self.total_loss = 0.5*self.tv_loss(self.illumination_out, self.reflection_out)
self.total_loss += 0.0001*self.tv_loss(self.reflection_out)
self.total_loss += self.l1_loss(self.illumination_out, self.original_illumination)
self.total_loss += self.mse_loss(self.illumination_out*self.reflection_out, self.image_torch)
self.total_loss.backward()
def _obtain_current_result(self, step):
"""
puts in self.current result the current result.
also updates the best result
:return:
"""
if step == self.num_iter - 1 or step % 8 == 0:
reflection_out_np = np.clip(torch_to_np(self.reflection_out),0,1)
illumination_out_np = np.clip(torch_to_np(self.illumination_out),0,1)
# psnr = compare_psnr(np.clip(self.image_np,0,1), reflection_out_np * illumination_out_np)
# self.psnrs.append(psnr)
self.current_result = EnhancementResult(reflection=reflection_out_np, illumination=illumination_out_np)
# if self.best_result is None or self.best_result.psnr < self.current_result.psnr:
# self.best_result = self.current_result
def _plot_closure(self, step):
print('Iteration {:5d} Loss {:5f}'.format(step,self.total_loss.item()))
if step % self.show_every == self.show_every - 1:
# plot_image_grid("left_right_{}".format(step),
# [self.current_result.reflection, self.current_result.illumination])
# misc.imsave('out/illumination.png',
# misc.imresize(torch_to_np(self.illumination_out).transpose(1, 2, 0),(self.size[1],self.size[0])))
misc.imsave('output/reflection/reflection-{}.png'.format(step),
misc.imresize(torch_to_np(self.reflection_out).transpose(1, 2, 0), (self.size[1],self.size[0])))
self.get_enhanced(step)
def gamma_trans(self, img, gamma):
gamma_table = [np.power(x / 255.0, gamma) * 255.0 for x in range(256)]
gamma_table = np.round(np.array(gamma_table)).astype(np.uint8)
return cv2.LUT(img, gamma_table)
def adjust_gammma(self,img_gray):
# mean = np.mean(img_gray)
# gamma_val = math.log10(0.5) / math.log10(mean / 255)
# print(gamma_val)
image_gamma_correct = self.gamma_trans(img_gray, 0.5)
return image_gamma_correct
def get_enhanced(self, step, flag=False):
(R, G, B) = self.img.split()
ini_illumination = torch_to_np(self.illumination_out).transpose(1, 2, 0)
ini_illumination = misc.imresize(ini_illumination, (self.size[1], self.size[0]))
# print(ini_illumination.shape)
ini_illumination = np.max(ini_illumination, axis=2)
cv2.imwrite('output/illumination/illumination-{}.png'.format(step), ini_illumination)
# If the input image is extremely dark, setting the flag as True can produce promising result.
if flag==True:
ini_illumination = np.clip(np.max(ini_illumination, axis=2), 0.0000002, 255)
else:
ini_illumination = np.clip(self.adjust_gammma(ini_illumination), 0.0000002, 255)
R = R / ini_illumination
G = G / ini_illumination
B = B / ini_illumination
self.best_result = np.clip(cv2.merge([B, G, R])*255, 0.02, 255).astype(np.uint8)
cv2.imwrite('output/result-{}.png'.format(step), self.best_result)
def lowlight_enhancer(image_name, image):
s = Enhancement(image_name, image)
s.optimize()
if __name__ == "__main__":
input_root = arg.input
output_root = arg.result
datasets = ['DICM', 'ExDark', 'Fusion', 'LIME', 'NPEA', 'Nasa', 'VV']
# datasets = ['images-for-computing-time']
for dataset in datasets:
input_folder = os.path.join(input_root, dataset)
output_folder = os.path.join(output_root, dataset)
if not os.path.exists(output_folder):
os.makedirs(output_folder)
# print(output_folder)
path = glob(input_folder + '/*.*')
path.sort()
for i in range(len(path)):
filename = os.path.basename(path[i])
img_path = os.path.join(input_folder, filename)
img_path_out = os.path.join(output_folder, filename)
img = Image.open(img_path).convert('RGB') #LOLdataset/eval15/low/1.png
lowlight_enhancer(img_path_out, img)
# input_folder = 'data/images-for-computing-time'
# output_folder = './result'
# filename = "BladeImg048_LT.BMP"
# img_path = os.path.join(input_folder, filename)
# img_path_out = os.path.join(output_folder, filename)
# img = Image.open(img_path).convert('RGB') # LOLdataset/eval15/low/1.png
# lowlight_enhancer(img_path_out, img)
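A standalone sketch of the gamma-LUT idea used by `gamma_trans`/`adjust_gammma` above (gamma = 0.5 brightens dark regions); the input image here is random placeholder data:

```python
import numpy as np
import cv2

gamma = 0.5
table = np.round(np.array([(x / 255.0) ** gamma * 255.0 for x in range(256)])).astype(np.uint8)
gray = np.random.randint(0, 256, (64, 64), dtype=np.uint8)   # placeholder image
brightened = cv2.LUT(gray, table)
```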
avg_line_length: 40.866261 | max_line_length: 136 | alphanum_fraction: 0.635329
hexsha: 3cdb8ba8e3212a27303a856167ddf2d5ac05d6f0 | size: 8,620 | ext: py | lang: Python
max_stars: circa/formats/nec.py @ marcan/circa (head e517a268f2b40b9a1f8f04696982c5c3c80df960) | licenses: ["X11"] | count: 14 | events: 2020-11-22T23:34:26.000Z – 2022-03-21T21:03:45.000Z
max_issues: circa/formats/nec.py @ marcan/circa (head e517a268f2b40b9a1f8f04696982c5c3c80df960) | licenses: ["X11"] | count: null | events: null
max_forks: circa/formats/nec.py @ marcan/circa (head e517a268f2b40b9a1f8f04696982c5c3c80df960) | licenses: ["X11"] | count: null | events: null
content:
#!/usr/bin/python
import statistics
from ..core import IRCode, DataError, DecodeError
from ..util import to_bits_lsb, from_bits_lsb
__all__ = ["NECCode"]
class NECCode(IRCode):
NAMES = ["nec"]
def params(self):
yield from (i for i in super().params() if i[0] not in "packet_interval")
yield ("pulse_time", "tp", int, 563)
yield ("space_time_0", "t0", int, self.pulse_time)
yield ("space_time_1", "t1", int, self.pulse_time * 3)
yield ("preamble_time_high", "ph", int, self.pulse_time * 16)
yield ("preamble_time_low", "pl", int, self.preamble_time_high // 2)
yield ("repeat_time_high", "rh", int, self.preamble_time_high)
yield ("repeat_time_low", "rl", int, self.preamble_time_low // 2)
yield ("address_bytes", "a", int, 0)
yield ("packet_gap", "pg", int, 0)
yield ("packet_interval", "pi", int, self.pulse_time * 192 if self.packet_gap == 0 else 0)
yield ("repeat_interval", "ri", int, self.packet_interval)
yield ("burst_count", "b", int, 0)
yield ("burst_time_high", "bh", int, self.pulse_time)
yield ("burst_time_low", "bl", int, self.pulse_time)
yield ("burst_gap", "bg", int, self.pulse_time * 60)
def _parse_packet(self, packet):
for i in packet:
if not isinstance(i, int) or not 0 <= i <= 255:
raise DataError("Invalid data byte: %r" % i)
return packet
def _parse_one_string_data(self, s):
return [int(i, 16) for i in s.split(",")]
def _format_one_string_data(self, d):
return ",".join("%02x" % i for i in d)
def encode_packet(self, packet, state=None):
if self.address_bytes < 0:
data, payload = packet, []
else:
data, payload = packet[:self.address_bytes], packet[self.address_bytes:]
for i in payload:
data.append(i)
data.append(i ^ 0xff)
pulses = [self.preamble_time_high, self.preamble_time_low]
for byte in data:
for bit in to_bits_lsb(byte, 8):
pulses.append(self.pulse_time)
if bit:
pulses.append(self.space_time_1)
else:
pulses.append(self.space_time_0)
pulses.append(self.pulse_time)
pulses.append(max(self.pulse_time, self.packet_gap))
padding = 0
return 1, pulses
def to_raw(self, state=None):
raw_code = super().to_raw(state)
raw_code.packet_interval = 0
if self.burst_count:
burst = [self.burst_time_high, self.burst_time_low] * self.burst_count
burst[-1] = self.burst_gap
raw_code.data.insert(0, ({"count": 1, "pulses": burst}))
if self.count > 1:
raw_code.data.append({"count": self.count - 1, "pulses": [
self.repeat_time_high, self.repeat_time_low, self.pulse_time,
max(self.pulse_time, self.repeat_interval - self.repeat_time_high - self.repeat_time_low - self.pulse_time)
]})
raw_code.count = 1
return raw_code
def parse_code(self, code):
self.fc = code.fc
code = code.to_raw().flatten(no_repeats=True)
pulses = code.data[0]["pulses"]
self._reset_samples()
packets = []
p = 0
repeats = 0
# Try to detect an initial burst...
if len(pulses) >= 4:
bmin = min(pulses[1:4])
bmax = max(pulses[:4])
bavg = sum(pulses[:4]) / 4
if pulses[0] < bavg * 1.5 and abs(bmin - bavg) / bavg < 0.3 and (bmax - bavg) / bavg < 0.3:
while p < (len(pulses)-1):
bh, bl = pulses[p:p + 2]
if bh > 2 * bavg:
self._sample("burst_gap", pulses[p - 1])
break
self._sample("burst_time_high", bh)
self.burst_count += 1
p += 2
if bl > 2 * bavg:
self._sample("burst_gap", bl)
break
self._sample("burst_time_low", bl)
if len(pulses) <= p:
raise DataError("No data")
last_packet_length = None
while p < (len(pulses)-1):
packet_start = p
hh, hl = pulses[p:p + 2]
p += 2
bits = []
if p >= len(pulses):
# runt end pulse?
if not packets:
raise DataError("No data")
break
if packets and not repeats:
self._sample("packet_gap", pulses[p - 3])
while p < (len(pulses)-1):
mark, space = pulses[p:p+2]
if (bits or packets) and mark > self.pulse_time * 2:
break
p += 2
self._sample("pulse_time", mark)
if space < self.pulse_time * 2:
bits.append(0)
self._sample("space_time_0", space)
elif space < self.pulse_time * 6:
bits.append(1)
self._sample("space_time_1", space)
else:
bits.append(0) # end bit?
break
if (len(bits) % 8) != 1:
raise DataError("Bit count not an even number of bytes")
if len(bits) > 1:
self._sample("preamble_time_high", hh)
self._sample("preamble_time_low", hl)
if repeats > 0:
raise DataError("Data packet after a repeat packet")
packets.append([from_bits_lsb(bits[i:i+8]) for i in range(0, len(bits) - 1, 8)])
if last_packet_length:
self._sample("packet_interval", last_packet_length)
else:
self._sample("repeat_time_high", hh)
self._sample("repeat_time_low", hl)
if not packets:
raise DataError("Repeat packet with no data packet")
if repeats > 0:
self._sample("repeat_interval", last_packet_length)
else:
self._sample("packet_interval", last_packet_length)
repeats += 1
last_packet_length = sum(pulses[packet_start:p])
# Packet spacing can be specified with either an interval or a gap.
# Pick whichever one works best.
if "packet_interval" in self._samples and "packet_gap" in self._samples:
if len(self._samples["packet_interval"]) > 1 and len(self._samples["packet_gap"]) > 1:
vi = statistics.variance(self._samples["packet_interval"])
vg = statistics.variance(self._samples["packet_gap"])
if vi > vg:
del self._samples["packet_interval"]
else:
del self._samples["packet_gap"]
else:
# Just two packets, go with gap
del self._samples["packet_interval"]
self._sample_default("packet_gap", 0)
self._sample_default("packet_interval", self.pulse_time * 192 if self.packet_gap == 0 else 0)
self._sample_default("repeat_interval", self.packet_interval)
self._sample_default("repeat_time_high", self.preamble_time_high)
self._sample_default("repeat_time_low", self.preamble_time_low // 2)
self._sample_default("burst_time_high", self.pulse_time)
self._sample_default("burst_time_low", self.pulse_time)
self._sample_default("burst_gap", self.pulse_time * 60)
self.address_bytes = len(packets[0]) & 1 # if odd number of bytes, at least one address byte is required
for packet in packets:
inv = 0
for b1, b2 in zip(*([iter(packet[::-1])] * 2)):
if b1 == b2 ^ 0xff:
inv += 2
else:
break
ab = len(packet) - inv
if (ab ^ self.address_bytes) & 1: # if the length parity is off, bail
self.address_bytes = -1
break
self.address_bytes = max(ab, self.address_bytes)
for packet in packets:
if len(packet) <= self.address_bytes:
self.address_bytes = -1
break
if self.address_bytes != -1:
packets = [packet[:self.address_bytes] + packet[self.address_bytes::2] for packet in packets]
self.data = packets
self.count = repeats + 1
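# --- Illustrative sketch (not part of the original module) -----------------
# Standalone rendition of the LSB-first mark/space expansion performed in
# encode_packet() above, assuming the default timings: a 563 us mark per bit,
# followed by a space of 1x the mark for a 0 bit and 3x for a 1 bit.
def _demo_encode_byte(byte, tp=563):
    pulses = []
    for bit in range(8):
        pulses.append(tp)                                    # mark
        pulses.append(tp * 3 if (byte >> bit) & 1 else tp)   # space
    return pulses

# 0xA5 = 0b10100101, sent LSB-first: 1,0,1,0,0,1,0,1
assert _demo_encode_byte(0xA5)[:4] == [563, 1689, 563, 563]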
| 38.828829
| 123
| 0.533759
|
164920c75c621f7ae804a257847c1c32c33740f6
| 4,887
|
py
|
Python
|
modules/anti_tamper.py
|
bobombobo/python-obfuscator
|
c92271a6a482d74d3ac5fc3525261f486914ce16
|
[
"CC0-1.0"
] | 1
|
2021-05-18T16:04:31.000Z
|
2021-05-18T16:04:31.000Z
|
modules/anti_tamper.py
|
bobombobo/python-obfuscator
|
c92271a6a482d74d3ac5fc3525261f486914ce16
|
[
"CC0-1.0"
] | 1
|
2021-05-31T08:09:47.000Z
|
2021-06-01T16:12:05.000Z
|
modules/anti_tamper.py
|
bobombobo/python-obfuscator
|
c92271a6a482d74d3ac5fc3525261f486914ce16
|
[
"CC0-1.0"
] | null | null | null |
import time
import hashlib
import requests
import random
import string
import getopt
import http.cookiejar
import sys
import urllib.parse
import urllib.request
from http.cookies import SimpleCookie
from json import loads as json_loads
from os import environ
def gen_anti_code(code):
_headers = {"Referer": 'https://rentry.co'}
class UrllibClient:
"""Simple HTTP Session Client, keeps cookies."""
def __init__(self):
self.cookie_jar = http.cookiejar.CookieJar()
self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie_jar))
urllib.request.install_opener(self.opener)
def get(self, url, headers={}):
request = urllib.request.Request(url, headers=headers)
return self._request(request)
def post(self, url, data=None, headers={}):
postdata = urllib.parse.urlencode(data).encode()
request = urllib.request.Request(url, postdata, headers)
return self._request(request)
def _request(self, request):
response = self.opener.open(request)
response.status_code = response.getcode()
response.data = response.read().decode('utf-8')
return response
def raw(url):
client = UrllibClient()
return json_loads(client.get('https://rentry.co/api/raw/{}'.format(url)).data)
def new(url, edit_code, text):
client, cookie = UrllibClient(), SimpleCookie()
cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
csrftoken = cookie['csrftoken'].value
payload = {
'csrfmiddlewaretoken': csrftoken,
'url': url,
'edit_code': edit_code,
'text': text
}
return json_loads(client.post('https://rentry.co/api/new', payload, headers=_headers).data)
def edit(url, edit_code, text):
client, cookie = UrllibClient(), SimpleCookie()
cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
csrftoken = cookie['csrftoken'].value
payload = {
'csrfmiddlewaretoken': csrftoken,
'edit_code': edit_code,
'text': text
}
return json_loads(client.post('https://rentry.co/api/edit/{}'.format(url), payload, headers=_headers).data)
import pickle
import base64
def sha256sum(filename):
h = hashlib.sha256()
b = bytearray(128*1024)
mv = memoryview(b)
with open(filename, 'rb', buffering=0) as f:
for n in iter(lambda : f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
def randstring(length):
letters = string.ascii_letters
return ( ''.join(random.choice(letters) for i in range(length)) )
text = "temp_place_holder"
    edit_code_raw=randstring(16)
url=randstring(16)
    response = new(url, edit_code_raw, text)
if response['status'] != '200':
print('error: {}'.format(response['content']))
try:
for i in response['errors'].split('.'):
i and print(i)
sys.exit(1)
except:
sys.exit(1)
else:
#print('Url: {}\nEdit code: {}'.format(response['url'], response['edit_code']))
print("success")
raw_url = "https://rentry.co/"+url+"/raw"
class RCE:
def __reduce__(self):
#with open("payload.py", "r") as file:
# payloadfile = file.read()
#payload = ("python payload.py")
cmd=(str(final))
return (eval("exec"), (cmd,))
#generate payload
def gp():
pickled = pickle.dumps(RCE())
return((base64.urlsafe_b64encode(pickled)).decode('utf-8'))
code = code
oaiuwbegiubaw=randstring(12)
hashcheck='''import requests;import hashlib
def sha256sum(filename):
h = hashlib.sha256()
b = bytearray(128*1024)
mv = memoryview(b)
with open(filename, 'rb', buffering=0) as f:
for n in iter(lambda : f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
hashval = requests.get("url").text
if (sha256sum(__file__))==(hashval):
exec("""[ungoiwnbng]""")
else:
print("invalid code! contact a administrator if you believe this is an error.")'''.replace("url", raw_url)
final=hashcheck.replace("oaiuwbegiubaw", oaiuwbegiubaw)
final = final.replace("[ungoiwnbng]", code)
class RCE:
def __reduce__(self):
#with open("payload.py", "r") as file:
# payloadfile = file.read()
#payload = ("python payload.py")
cmd=(str(final))
return (eval("exec"), (cmd,))
#generate payload
def gp():
pickled = pickle.dumps(RCE())
return((base64.urlsafe_b64encode(pickled)).decode('utf-8'))
batch_code = gp()
payload = ("import pickle;import base64;pickle.loads(base64.urlsafe_b64decode('payload_eiuahiwuhg'))\n".replace("payload_eiuahiwuhg", str(batch_code)))
    return([payload, url, edit_code_raw])
#print(code)
#print(sha256sum("testcode.py"))
| 28.248555
| 153
| 0.634336
|
a30d5f1ba5e9257e8b9fb082649db40aa20fe2e2
| 3,332
|
py
|
Python
|
icp2edd/icpobj/cpmeta/spatialCoverage.py
|
BjerknesClimateDataCentre/ICOS-CP2ERDDAP
|
42909fca2654aaf3f8a9c3894f3910ecf07ae0c5
|
[
"MIT"
] | null | null | null |
icp2edd/icpobj/cpmeta/spatialCoverage.py
|
BjerknesClimateDataCentre/ICOS-CP2ERDDAP
|
42909fca2654aaf3f8a9c3894f3910ecf07ae0c5
|
[
"MIT"
] | 10
|
2020-12-04T13:44:21.000Z
|
2021-05-14T08:46:22.000Z
|
icp2edd/icpobj/cpmeta/spatialCoverage.py
|
BjerknesClimateDataCentre/ICOS-CP2ERDDAP
|
42909fca2654aaf3f8a9c3894f3910ecf07ae0c5
|
[
"MIT"
] | 1
|
2020-11-30T14:00:05.000Z
|
2020-11-30T14:00:05.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# spatialCoverage.py
"""
The spatialCoverage module is used to explore ICOS CP cpmeta::SpatialCoverages' metadata.
Example usage:
from cpmeta import SpatialCoverage
spatialCoverages = SpatialCoverage() # initialise ICOS CP SpatialCoverage object
spatialCoverages.get_meta() # get spatialCoverages' metadata from ICOS CP
spatialCoverages.show() # print spatialCoverages' metadata
"""
# --- import -----------------------------------
# import from standard lib
import logging
import traceback
# import from other lib
# import from my project
from icp2edd.icpobj.geosparql import Geometry
# --- module's variable ------------------------
# load logger
_logger = logging.getLogger(__name__)
# object attributes' dictionary with RDF 'property' as key and RDF 'object' as value
# RDF triples: 'subject' + 'property/predicate' + 'object/value'
# {'property/predicate': 'object/value'}
# Note: 'object/value' will be the output attribute name
_attr = {}
# list of equivalent class
_equivalentClass = ["cpmeta.LatLonBox"]
# cpmeta/Position is subClassOf cpmeta/SpatialCoverage
# cpmeta/Site is subClassOf cpmeta/SpatialCoverage
# ----------------------------------------------
class SpatialCoverage(Geometry):
"""
>>> t.getMeta()
>>> t.show(True)
"""
def __init__(self, limit=None, uri=None):
"""initialise instance of SpatialCoverage(Geometry).
It will be used to set up a sparql query, and get all metadata of SpatialCoverage from ICOS CP.
        Optionally we can limit the number of outputs:
- limit the amount of returned results
and/or select SpatialCoverage:
- with ICOS CP 'uri'
Example:
SpatialCoverage(limit=5)
:param limit: number of returned results
:param uri: ICOS CP URI ('http://meta.icos-cp.eu/resources/spcov_-0WJzcoz-4lJmh375ytDkSFF')
"""
super().__init__()
# set up class/instance variables
self._uri = uri
self._limit = limit
# inherit properties
self._inherit = {**self.attr}
if isinstance(_attr, dict):
# keep own properties
self._attr = _attr
# merge own and inherit properties.
# Note: .attr's values are overwritten by the self.attr's
self.attr = {**self._attr, **self._inherit}
# add subproperties
for prop in self.attr:
self._addSubProperties(prop)
if isinstance(_equivalentClass, list):
self._equivalentClass = _equivalentClass
# object type URI
self._object = "http://meta.icos-cp.eu/ontologies/cpmeta/SpatialCoverage"
#
self._objtype = None
if self._object is not None:
self.objtype = self._getObjectType()
# get instance name
(filename, line_number, function_name, text) = traceback.extract_stack()[-2]
self._instance_name = text[: text.find("=")].strip()
if __name__ == "__main__":
import doctest
doctest.testmod(
extraglobs={"t": SpatialCoverage(limit=10)},
optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE,
)
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
| 30.290909
| 103
| 0.628752
|
b95fe5b7a2de6ccddde14d1be80f3dd7bc22f55e
| 3,890
|
py
|
Python
|
examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py
|
bilgetutak/pyroms
|
3b0550f26f4ac181b7812e14a7167cd1ca0797f0
|
[
"BSD-3-Clause"
] | 75
|
2016-04-05T07:15:57.000Z
|
2022-03-04T22:49:54.000Z
|
examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py
|
hadfieldnz/pyroms-mgh
|
cd0fe39075825f97a7caf64e2c4c5a19f23302fd
|
[
"BSD-3-Clause"
] | 27
|
2017-02-26T04:27:49.000Z
|
2021-12-01T17:26:56.000Z
|
examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py
|
hadfieldnz/pyroms-mgh
|
cd0fe39075825f97a7caf64e2c4c5a19f23302fd
|
[
"BSD-3-Clause"
] | 56
|
2016-05-11T06:19:14.000Z
|
2022-03-22T19:04:17.000Z
|
import subprocess
import os
import sys
import numpy as np
import netCDF4 as nc
import pyroms
import pyroms_toolbox
from remap_bio import remap_bio
#build list of date to remap
tag = 'y1988-2007m01'
data_dir = '/archive/u1/uaf/kate/COBALT/'
dst_dir='./'
src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/archive/u1/uaf/kate/COBALT/GFDL_CM2.1_grid.nc', name='ESM2M_NWGOA3')
dst_grd = pyroms.grid.get_ROMS_grid('NWGOA3')
# define all tracer stuff
list_tracer = ['alk', 'cadet_arag', 'cadet_calc', 'dic', 'fed', 'fedet', 'fedi', 'felg', 'fesm', 'ldon', 'ldop', 'lith', 'lithdet', 'nbact', 'ndet', 'ndi', 'nlg', 'nsm', 'nh4', 'no3', 'o2', 'pdet', 'po4', 'srdon', 'srdop', 'sldon', 'sldop', 'sidet', 'silg', 'sio4', 'nsmz', 'nmdz', 'nlgz','cased','chl','irr_mem','htotal','co3_ion']
tracer_longname = ['Alkalinity', 'Detrital CaCO3', 'Detrital CaCO3', 'Dissolved Inorganic Carbon', 'Dissolved Iron', 'Detrital Iron', 'Diazotroph Iron', 'Large Phytoplankton Iron', 'Small Phytoplankton Iron', 'labile DON', 'labile DOP', 'Lithogenic Aluminosilicate', 'lithdet', 'bacterial', 'ndet', 'Diazotroph Nitrogen', 'Large Phytoplankton Nitrogen', 'Small Phytoplankton Nitrogen', 'Ammonia', 'Nitrate', 'Oxygen', 'Detrital Phosphorus', 'Phosphate', 'Semi-Refractory DON', 'Semi-Refractory DOP', 'Semilabile DON', 'Semilabile DOP', 'Detrital Silicon', 'Large Phytoplankton Silicon', 'Silicate', 'Small Zooplankton Nitrogen', 'Medium-sized zooplankton Nitrogen', 'large Zooplankton Nitrogen','Sediment CaCO3','Cholorophyll','Irradiance Memory','Total H+','Carbonate ion']
tracer_units = ['mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'g/kg', 'g/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg','mol.m-3','ug.kg-1','W.m-2','mol/kg','mol/kg']
print('\nBuild IC file for time %s' %tag)
for ktr in np.arange(len(list_tracer)):
mydict = {'tracer':list_tracer[ktr],'longname':tracer_longname[ktr],'units':tracer_units[ktr], \
'file':data_dir + 'ocean_cobalt_tracers.1988-2007.01_12.nc', 'nframe':0}
remap_bio(mydict, src_grd, dst_grd, dst_dir=dst_dir)
## merge file
ic_file = dst_dir + dst_grd.name + '_ic_bio_GFDL-JAN.nc'
out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[0] + '.nc'
command = ('ncks', '-a', '-O', out_file, ic_file)
subprocess.check_call(command)
os.remove(out_file)
for ktr in np.arange(1,len(list_tracer)):
out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[ktr] + '.nc'
command = ('ncks', '-a', '-A', out_file, ic_file)
subprocess.check_call(command)
os.remove(out_file)
#------------------ Add additional zeros fields ----------------------------------
fidic = nc.Dataset(ic_file,'a',format='NETCDF3_64BIT')
fidic.createVariable('mu_mem_lg', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho',) )
fidic.variables['mu_mem_lg'].long_name = 'large phytoplankton aggregation memory'
fidic.variables['mu_mem_lg'].units = ''
fidic.variables['mu_mem_lg'].field = 'mu_mem_lg, scalar, series'
fidic.createVariable('mu_mem_di', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho',) )
fidic.variables['mu_mem_di'].long_name = 'medium phytoplankton aggregation memory'
fidic.variables['mu_mem_di'].units = ''
fidic.variables['mu_mem_di'].field = 'mu_mem_di, scalar, series'
fidic.createVariable('mu_mem_sm', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho',) )
fidic.variables['mu_mem_sm'].long_name = 'small phytoplankton aggregation memory'
fidic.variables['mu_mem_sm'].units = ''
fidic.variables['mu_mem_sm'].field = 'mu_mem_sm, scalar, series'
fidic.variables['mu_mem_lg'][0,:,:,:] = 0.
fidic.variables['mu_mem_di'][0,:,:,:] = 0.
fidic.variables['mu_mem_sm'][0,:,:,:] = 0.
fidic.close()
| 51.866667
| 774
| 0.678663
|
1a9d1cb1d914e69df58b97e0b8ae50683fc16c94
| 2,661
|
py
|
Python
|
webapi/tct-csp-w3c-tests/csp-py/csp_default-src_cross-origin_font_blocked-manual.py
|
zhuyongyong/crosswalk-test-suite
|
24f3f8cfa663a365b0a22685d5bd096a637f72db
|
[
"BSD-3-Clause"
] | 3
|
2015-08-12T03:39:31.000Z
|
2019-09-18T04:37:54.000Z
|
webapi/tct-csp-w3c-tests/csp-py/csp_default-src_cross-origin_font_blocked-manual.py
|
zhuyongyong/crosswalk-test-suite
|
24f3f8cfa663a365b0a22685d5bd096a637f72db
|
[
"BSD-3-Clause"
] | 23
|
2015-01-19T03:10:13.000Z
|
2016-06-13T03:08:51.000Z
|
webapi/tct-csp-w3c-tests/csp-py/csp_default-src_cross-origin_font_blocked-manual.py
|
zhuyongyong/crosswalk-test-suite
|
24f3f8cfa663a365b0a22685d5bd096a637f72db
|
[
"BSD-3-Clause"
] | 18
|
2015-02-28T21:29:55.000Z
|
2022-01-20T10:06:28.000Z
|
def main(request, response):
_URL = request.url
_CSSURL = _URL[:_URL.index('/csp') + 1] + "csp/support/w3c/CanvasTest.ttf"
response.headers.set(
"Content-Security-Policy",
"default-src http://www.tizen.com 'unsafe-inline'")
response.headers.set(
"X-Content-Security-Policy",
"default-src http://www.tizen.com 'unsafe-inline'")
response.headers.set(
"X-WebKit-CSP",
"default-src http://www.tizen.com 'unsafe-inline'")
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Hao, Yunfei <yunfeix.hao@intel.com>
-->
<html>
<head>
<title>CSP Test: csp_default-src_cross-origin_font_blocked</title>
<link rel="author" title="Intel" href="http://www.intel.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#default-src"/>
<meta name="flags" content=""/>
<meta charset="utf-8"/>
<style>
@font-face {
font-family: Canvas;
src: url(""" + _CSSURL + """);
}
#test {
font-family: Canvas;
}
</style>
</head>
<body>
<p>Test passes if the two lines are the same in font</p>
<div id="test">1234 ABCD</div>
<div>1234 ABCD</div>
</body>
</html> """
| 39.132353
| 84
| 0.701992
|
c7ea440b517e3b2dc9145d4c3644400e58f29b66
| 11,571
|
py
|
Python
|
vrchatapi/model/player_moderation_type.py
|
vrchatapi/vrchatapi-python
|
996b7ddf2914059f1fd4e5def5e3555e678634c0
|
[
"MIT"
] | 8
|
2021-08-25T02:35:30.000Z
|
2022-03-28T18:11:58.000Z
|
vrchatapi/model/player_moderation_type.py
|
vrchatapi/vrchatapi-python
|
996b7ddf2914059f1fd4e5def5e3555e678634c0
|
[
"MIT"
] | 1
|
2022-03-18T20:29:30.000Z
|
2022-03-18T20:35:05.000Z
|
vrchatapi/model/player_moderation_type.py
|
vrchatapi/vrchatapi-python
|
996b7ddf2914059f1fd4e5def5e3555e678634c0
|
[
"MIT"
] | 1
|
2022-01-11T10:49:12.000Z
|
2022-01-11T10:49:12.000Z
|
"""
VRChat API Documentation
The version of the OpenAPI document: 1.6.7
Contact: me@ruby.js.org
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from vrchatapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from vrchatapi.exceptions import ApiAttributeError
class PlayerModerationType(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('value',): {
'MUTE': "mute",
'UNMUTE': "unmute",
'BLOCK': "block",
'UNBLOCK': "unblock",
'HIDEAVATAR': "hideAvatar",
'SHOWAVATAR': "showAvatar",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'value': (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {}
read_only_vars = set()
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""PlayerModerationType - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str): if omitted defaults to "showAvatar", must be one of ["mute", "unmute", "block", "unblock", "hideAvatar", "showAvatar", ] # noqa: E501
Keyword Args:
value (str): if omitted defaults to "showAvatar", must be one of ["mute", "unmute", "block", "unblock", "hideAvatar", "showAvatar", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
value = "showAvatar"
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
"""PlayerModerationType - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str): if omitted defaults to "showAvatar", must be one of ["mute", "unmute", "block", "unblock", "hideAvatar", "showAvatar", ] # noqa: E501
Keyword Args:
value (str): if omitted defaults to "showAvatar", must be one of ["mute", "unmute", "block", "unblock", "hideAvatar", "showAvatar", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
self = super(OpenApiModel, cls).__new__(cls)
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
value = "showAvatar"
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
return self
| 41.473118
| 161
| 0.557774
|
8bb44421ca1fa9a5dd3a2c73a82996603e53de8a
| 1,220
|
py
|
Python
|
setup.py
|
StevenCostiou/reflectivipy
|
750ed93cfb463304958e590d895c76169caa4b98
|
[
"MIT"
] | 10
|
2019-01-18T17:45:18.000Z
|
2019-10-05T08:58:17.000Z
|
setup.py
|
StevenCostiou/reflectivipy
|
750ed93cfb463304958e590d895c76169caa4b98
|
[
"MIT"
] | null | null | null |
setup.py
|
StevenCostiou/reflectivipy
|
750ed93cfb463304958e590d895c76169caa4b98
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from setuptools import setup
import reflectivipy
packages = ["reflectivipy", "reflectivipy.wrappers"]
setup(
name="reflectivipy",
version=reflectivipy.__version__,
description=(
"A Python Implementation of the Reflectivity API from " "the Pharo language"
),
long_description=open("README.rst").read(),
keywords="object-centric partial-behavior-reflection metaprogramming",
url="https://github.com/StevenCostiou/reflectivipy",
author="Steven Costiou",
author_email="steven.costiou@abc.fr",
packages=packages,
package_data={"": ["README.rst", "LICENCE"]},
include_package_data=True,
tests_require=["pytest"],
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Debuggers",
],
)
| 32.972973
| 84
| 0.659016
|
c02314a5949b3c81927cf5790566cc54619685c6
| 1,832
|
py
|
Python
|
wordrl/a2c/play.py
|
VictorButoi/WordRL
|
5a8af7c6f639d6d7eebf16d7859ef25dcf7ae85c
|
[
"MIT"
] | null | null | null |
wordrl/a2c/play.py
|
VictorButoi/WordRL
|
5a8af7c6f639d6d7eebf16d7859ef25dcf7ae85c
|
[
"MIT"
] | 2
|
2022-03-06T15:34:20.000Z
|
2022-03-06T17:17:24.000Z
|
wordrl/a2c/play.py
|
VictorButoi/WordRL
|
5a8af7c6f639d6d7eebf16d7859ef25dcf7ae85c
|
[
"MIT"
] | null | null | null |
from typing import Tuple, List
import wordle.state
from a2c.agent import GreedyActorCriticAgent
from a2c.module import AdvantageActorCritic
from wordle.wordle import WordleEnvBase
def load_from_checkpoint(
checkpoint: str
) -> Tuple[AdvantageActorCritic, GreedyActorCriticAgent, WordleEnvBase]:
"""
:param checkpoint:
:return:
"""
model = AdvantageActorCritic.load_from_checkpoint(checkpoint)
agent = GreedyActorCriticAgent(model.net)
env = model.env
return model, agent, env
def suggest(
agent: GreedyActorCriticAgent,
env: WordleEnvBase,
sequence: List[Tuple[str, List[int]]],
) -> str:
"""
Given a list of words and masks, return the next suggested word
:param agent:
:param env:
:param sequence: History of moves and outcomes until now
:return:
"""
state = env.reset()
for word, mask in sequence:
word = word.upper()
assert word in env.words, f'{word} not in allowed words!'
assert all(i in (0, 1, 2) for i in mask)
assert len(mask) == 5
state = wordle.state.update_from_mask(state, word, mask)
return env.words[agent(state, "cpu")[0]]
def goal(
agent: GreedyActorCriticAgent,
env: WordleEnvBase,
goal_word: str,
) -> Tuple[bool, List[Tuple[str, int]]]:
state = env.reset()
try:
env.set_goal_word(goal_word.upper())
except:
raise ValueError("Goal word", goal_word, "not found in env words!")
outcomes = []
win = False
for i in range(env.max_turns):
action = agent(state, "cpu")[0]
state, reward, done, _ = env.step(action)
outcomes.append((env.words[action], reward))
if done:
if reward >= 0:
win = True
break
return win, outcomes
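# --- Illustrative usage sketch (the checkpoint path and guess word below are
# hypothetical; they are not part of the original module) -------------------
if __name__ == "__main__":
    model, agent, env = load_from_checkpoint("checkpoints/a2c.ckpt")  # hypothetical path
    # One prior guess with its 0/1/2 feedback mask, as validated in suggest()
    print(suggest(agent, env, [("crane", [0, 1, 0, 2, 0])]))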
| 25.802817
| 75
| 0.628821
|
ca7c6d9db8e16cf4f393742f067f86691c9844c4
| 2,277
|
py
|
Python
|
setup.py
|
Chris-hughes10/func-to-script
|
d7fc948b498d84863c8816b61c84e88e98d0667b
|
[
"MIT"
] | 1
|
2022-02-25T12:42:06.000Z
|
2022-02-25T12:42:06.000Z
|
setup.py
|
Chris-hughes10/func-to-script
|
d7fc948b498d84863c8816b61c84e88e98d0667b
|
[
"MIT"
] | null | null | null |
setup.py
|
Chris-hughes10/func-to-script
|
d7fc948b498d84863c8816b61c84e88e98d0667b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
from pkg_resources import Requirement
from setuptools import find_packages, setup
import versioneer
# Package meta-data.
NAME = "func-to-script"
DESCRIPTION = """
`func-to-script` is a lightweight and convenient tool which can be used to turn a Python function into a command line
script, with minimal boilerplate!
As `func-to-script` is only a thin wrapper around `argparse`, it is incredibly lightweight; there are no
additional dependencies required!
`func-to-script` is designed to be used in simple cases, so it offers a streamlined feature set.
For more complex scenarios, it is recommended to use `argparse` directly.
"""
URL = "https://github.com/Chris-hughes10/func-to-script"
EMAIL = "31883449+Chris-hughes10@users.noreply.github.com"
AUTHOR = "Chris Hughes"
REQUIRES_PYTHON = ">=3.7.0"
VERSION = versioneer.get_version()
FILEPATH = os.path.abspath(os.path.dirname(__file__))
REQUIRED = []
with open("requirements.txt", "r") as f:
for line in f.readlines():
try:
REQUIRED.append(str(Requirement.parse(line)))
except ValueError:
pass
# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
with io.open(os.path.join(FILEPATH, "README.md"), encoding="utf-8") as f:
LONG_DESCRIPTION = "\n" + f.read()
except FileNotFoundError:
LONG_DESCRIPTION = DESCRIPTION
# Where the magic happens:
setup(
name=NAME,
version=VERSION,
cmdclass=versioneer.get_cmdclass(),
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
author=AUTHOR,
author_email=EMAIL,
python_requires=REQUIRES_PYTHON,
url=URL,
packages=find_packages(
exclude=["tests", "*.tests", "*.tests.*", "tests.*", "test"]
),
scripts=[],
install_requires=REQUIRED,
include_package_data=True,
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
],
)
| 30.36
| 116
| 0.693456
|
0bef9e9ed18b09790984c7de997cacf9347374f2
| 50
|
py
|
Python
|
first_file.py
|
tatt0826/test
|
72e33adbf9e53d57d59c517f8501c83332b4b22c
|
[
"MIT"
] | null | null | null |
first_file.py
|
tatt0826/test
|
72e33adbf9e53d57d59c517f8501c83332b4b22c
|
[
"MIT"
] | null | null | null |
first_file.py
|
tatt0826/test
|
72e33adbf9e53d57d59c517f8501c83332b4b22c
|
[
"MIT"
] | null | null | null |
hoge
aaaaa
new line by local
new new line remote
| 8.333333
| 19
| 0.78
|
72d4f47356368c5124d0be9d5b1731532bffd083
| 691
|
py
|
Python
|
zerver/lib/statistics.py
|
pranayshahxyz/zulip
|
3da483487af79fde9dce2d21124dfa39b94936a5
|
[
"Apache-2.0"
] | 1
|
2020-03-19T00:52:48.000Z
|
2020-03-19T00:52:48.000Z
|
zerver/lib/statistics.py
|
pranayshahxyz/zulip
|
3da483487af79fde9dce2d21124dfa39b94936a5
|
[
"Apache-2.0"
] | null | null | null |
zerver/lib/statistics.py
|
pranayshahxyz/zulip
|
3da483487af79fde9dce2d21124dfa39b94936a5
|
[
"Apache-2.0"
] | 1
|
2020-04-21T01:26:54.000Z
|
2020-04-21T01:26:54.000Z
|
from zerver.models import UserProfile, UserActivityInterval
from datetime import datetime, timedelta
# Return the amount of Zulip usage for this user between the two
# given dates
def seconds_usage_between(user_profile: UserProfile, begin: datetime, end: datetime) -> timedelta:
intervals = UserActivityInterval.objects.filter(user_profile=user_profile,
end__gte=begin,
start__lte=end)
duration = timedelta(0)
for interval in intervals:
start = max(begin, interval.start)
finish = min(end, interval.end)
duration += finish-start
return duration
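# --- Illustrative sketch of the clipping arithmetic above (no ORM needed) --
# Only the overlap of an activity interval with [begin, end] is counted:
# max(begin, interval.start) .. min(end, interval.end).
# e.g. window 10:00-11:00, activity 10:45-11:30 -> 15 minutes counted:
assert (min(datetime(2020, 1, 1, 11, 0), datetime(2020, 1, 1, 11, 30))
        - max(datetime(2020, 1, 1, 10, 0), datetime(2020, 1, 1, 10, 45))
        == timedelta(minutes=15))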
| 40.647059
| 98
| 0.6411
|
b0bab2562a68f48c6f8b846efb2377a20fd5eaae
| 5,658
|
py
|
Python
|
test_scripts/example_inverted_pendulum_1ms.py
|
forgi86/pyMPC
|
291db149554767a035fcb01df3fed7a6b3fe60e4
|
[
"MIT"
] | 84
|
2019-05-28T09:27:37.000Z
|
2022-03-31T08:38:23.000Z
|
test_scripts/example_inverted_pendulum_1ms.py
|
passion4energy/pyMPC
|
4b004ba707dab49cd36d96a3575b8593c870a904
|
[
"MIT"
] | 2
|
2020-04-17T00:03:27.000Z
|
2021-01-30T11:35:58.000Z
|
test_scripts/example_inverted_pendulum_1ms.py
|
passion4energy/pyMPC
|
4b004ba707dab49cd36d96a3575b8593c870a904
|
[
"MIT"
] | 20
|
2019-10-13T13:50:16.000Z
|
2022-03-31T08:38:25.000Z
|
import numpy as np
import scipy.sparse as sparse
import time
import matplotlib.pyplot as plt
from scipy.integrate import ode
from scipy.interpolate import interp1d
from pyMPC.mpc import MPCController
if __name__ == '__main__':
# Constants #
M = 0.5
m = 0.2
b = 0.1
ftheta = 0.1
l = 0.3
g = 9.81
Ts_MPC = 25e-3
Ts_sim = 1e-3
ratio_Ts = int(Ts_MPC//Ts_sim)
Ac =np.array([[0, 1, 0, 0],
[0, -b/M, -(g*m)/M, (ftheta*m)/M],
[0, 0, 0, 1],
[0, b/(M*l), (M*g + g*m)/(M*l), -(M*ftheta + ftheta*m)/(M*l)]])
Bc = np.array([
[0.0],
[1.0/M],
[0.0],
[-1/(M*l)]
])
[nx, nu] = Bc.shape # number of states and number or inputs
# Nonlinear dynamics ODE
def f_ODE(t,x,u):
#print(x)
F = u
v = x[1]
theta = x[2]
omega = x[3]
der = np.zeros(4)
der[0] = v
der[1] = (m * l * np.sin(theta) * omega ** 2 - m * g * np.sin(theta) * np.cos(theta) + m * ftheta * np.cos(
theta) * omega + F - b * v) / (M + m * (1 - np.cos(theta) ** 2))
der[2] = omega
der[3] = ((M + m) * (g * np.sin(theta) - ftheta * omega) - m * l * omega ** 2 * np.sin(theta) * np.cos(
theta) - (
F - b * v) * np.cos(theta)) / (l * (M + m * (1 - np.cos(theta) ** 2)))
return der
# Brutal forward euler discretization
Ad = np.eye(nx) + Ac * Ts_MPC
Bd = Bc * Ts_MPC
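    # Forward Euler derivation: x[k+1] = x[k] + Ts*(Ac @ x[k] + Bc * u[k])
    #                                  = (I + Ts*Ac) @ x[k] + (Ts*Bc) * u[k],
    # hence Ad = I + Ts_MPC*Ac and Bd = Ts_MPC*Bc as computed above.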
# Reference input and states
t_ref_vec = np.array([0.0, 10.0, 20.0, 30.0, 40.0])
p_ref_vec = np.array([0.0, 0.3, 0.3, 0.0, 0.0])
rp_fun = interp1d(t_ref_vec, p_ref_vec, kind='zero')
r_fun = lambda t: np.array([rp_fun(t), 0.0, 0.0, 0.0])
xref = np.array([rp_fun(0), 0.0, 0.0, 0.0]) # reference state
uref = np.array([0.0]) # reference input
uminus1 = np.array([0.0]) # input at time step negative one - used to penalize the first delta u at time instant 0. Could be the same as uref.
# Constraints
xmin = np.array([-1.0, -100, -100, -100])
xmax = np.array([0.3, 100.0, 100, 100])
umin = np.array([-20])
umax = np.array([20])
Dumin = np.array([-5])
Dumax = np.array([5])
# Objective function weights
Qx = sparse.diags([0.3, 0, 1.0, 0]) # Quadratic cost for states x0, x1, ..., x_N-1
QxN = sparse.diags([0.3, 0, 1.0, 0]) # Quadratic cost for xN
Qu = 0.0 * sparse.eye(1) # Quadratic cost for u0, u1, ...., u_N-1
QDu = 0.01 * sparse.eye(1) # Quadratic cost for Du0, Du1, ...., Du_N-1
# Initial state
phi0 = 15*2*np.pi/360
x0 = np.array([0, 0, phi0, 0]) # initial state
t0 = 0
system_dyn = ode(f_ODE).set_integrator('vode', method='bdf')
system_dyn.set_initial_value(x0, t0)
system_dyn.set_f_params(0.0)
# Prediction horizon
Np = 30
K = MPCController(Ad,Bd,Np=Np, x0=x0,xref=xref,uminus1=uminus1,
Qx=Qx, QxN=QxN, Qu=Qu,QDu=QDu,
xmin=xmin,xmax=xmax,umin=umin,umax=umax,Dumin=Dumin,Dumax=Dumax,
eps_feas = 1e3)
K.setup()
# Simulate in closed loop
[nx, nu] = Bd.shape # number of states and number or inputs
len_sim = 40 # simulation length (s)
nsim = int(len_sim / Ts_MPC) # simulation length(timesteps)
x_vec = np.zeros((nsim, nx))
xref_vec = np.zeros((nsim, nx))
u_vec = np.zeros((nsim, nu))
t_vec = np.zeros((nsim,1))
nsim_fast = int(len_sim / Ts_sim)
xsim_fast = np.zeros((nsim_fast, nx)) # finer integration grid for performance evaluation
    xref_fast = np.zeros((nsim_fast, nx)) # finer integration grid for performance evaluation
t_vec_fast = np.zeros((nsim_fast, 1))
time_start = time.time()
t_step = t0
uMPC = None
idx_MPC = 0 # slow index increasing for the multiples of Ts_MPC
for idx_fast in range(nsim_fast):
idx_MPC = idx_fast // ratio_Ts
run_MPC = (idx_fast % ratio_Ts) == 0
xref_fast[idx_fast, :] = r_fun(t_step)
xsim_fast[idx_fast, :] = system_dyn.y
t_vec_fast[idx_fast, :] = t_step
if run_MPC: # it is also a step of the simulation at rate Ts_MPC
x_vec[idx_MPC, :] = system_dyn.y
t_vec[idx_MPC, :] = t_step
# MPC update and step. Could be in just one function call
if run_MPC:
xref = r_fun(t_step) # reference state
xref_vec[idx_MPC,:] = xref
K.update(system_dyn.y, uMPC, xref=xref) # update with measurement
uMPC = K.output() # MPC step (u_k value)
u_vec[idx_MPC, :] = uMPC
# System simulation step
if run_MPC:
system_dyn.set_f_params(uMPC) # set current input value
system_dyn.integrate(t_step + Ts_sim)
# Update simulation time
t_step += Ts_sim
idx_MPC += 1
time_sim = time.time() - time_start
fig,axes = plt.subplots(3,1, figsize=(10,10))
axes[0].plot(t_vec, x_vec[:, 0], "k", label='p')
axes[0].plot(t_vec, xref_vec[:,0], "r--", label="p_ref")
axes[0].set_title("Position (m)")
axes[1].plot(t_vec, x_vec[:, 2] * 360 / 2 / np.pi, label="phi")
axes[1].plot(t_vec, xref_vec[:,2] * 360 / 2 / np.pi, "r--", label="phi_ref")
axes[1].set_title("Angle (deg)")
axes[2].plot(t_vec, u_vec[:, 0], label="u")
axes[2].plot(t_vec, uref * np.ones(np.shape(t_vec)), "r--", label="u_ref")
axes[2].set_title("Force (N)")
for ax in axes:
ax.grid(True)
ax.legend()
| 33.88024
| 150
| 0.541534
|
d28a6f18c75577c6056adf7d2fe55db49f79c152
| 956
|
py
|
Python
|
.history/my_classes/FirstClassFunctions/callables_20210706010951.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FirstClassFunctions/callables_20210706010951.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FirstClassFunctions/callables_20210706010951.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
""" Callables
What are callables?
Any object that can be called using the () operator. A call always returns a value -> like functions and methods -> but it goes beyond these two...
Many other objects in Python are also callable
To see if an object is callable, we can use the builtin function: callable
callable(print) -> True
callable('abc'.upper) -> True
callable(str.upper) -> True
callable(callable) -> True
callable(10) -> False
Different types of callables
built-in functions print len callable
built-in methods a_str.upper a_list.append
user-defined functions created using def or lambda expressions
methods functions bound to an object
classes MyClass(x, y, z)
__new__(x, y, z) -> creates the new object
__init__(self,x y, z)
"""
| 28.969697
| 153
| 0.578452
|
27166786132f3073a85e7129900c176a50c1d021
| 1,479
|
py
|
Python
|
code/sensors/Thermo_MAX31855.py
|
uorocketry/Cistern
|
1f6f8c9319b4e9cddf207be95064153f7deebadc
|
[
"MIT"
] | 1
|
2022-03-25T04:03:54.000Z
|
2022-03-25T04:03:54.000Z
|
code/sensors/Thermo_MAX31855.py
|
uorocketry/Cistern
|
1f6f8c9319b4e9cddf207be95064153f7deebadc
|
[
"MIT"
] | 1
|
2019-10-03T01:14:01.000Z
|
2019-10-03T01:31:07.000Z
|
code/sensors/Thermo_MAX31855.py
|
uorocketry/Cistern
|
1f6f8c9319b4e9cddf207be95064153f7deebadc
|
[
"MIT"
] | 1
|
2022-03-25T04:03:55.000Z
|
2022-03-25T04:03:55.000Z
|
import time
import spidev
import gpiozero
class Thermo:
#def __init__(self, bus, device):
def __init__(self, pin, spi):
self.spi = spi
self.ce = gpiozero.DigitalOutputDevice(pin, active_high=False,initial_value=True)
def read(self):
self.ce.on() #We control the chip enable pin manually.
data = self.spi.readbytes(4) #read 32 bits from the interface.
self.ce.off()
data = int.from_bytes(data, "big")
# We now decode the bits to get the temperature.
# Go to https://cdn-shop.adafruit.com/datasheets/MAX31855.pdf
# to get the datasheet. Page 10 contains the
# description of the format of the data.
if data & (0b1<<31): #negative! drop the lower 18 bits and extend the sign.
#bit twiddling to get the bits we want.
#We first find and separate the
#bits containing the temperature data,
#then we xor it with all 1's (to flip it from positive to negative) and add 1, then convert
# to a negative number within python (because python ints are weird).
            # we then divide by 4, because the lsb of this int represents 0.25C.
            return -(((data >> 18) ^ 0b11111111111111) + 1) / 4
else:
#since it's positive, we don't convert to negative. We just separate
#out the temperature.
return (data >> 18) / 4
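# --- Worked decode sketch (illustrative, not part of the driver) -----------
# The thermocouple reading sits in bits 31..18 as a signed 14-bit value in
# units of 0.25 C, so +100.0 C is encoded as 400 placed at bit 18.
_frame = 400 << 18
assert not _frame & (0b1 << 31)        # sign bit clear -> positive reading
assert (_frame >> 18) / 4 == 100.0     # same arithmetic as Thermo.read()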
| 37.923077
| 104
| 0.597701
|
119cfe7f2f001caea8aad166a7004ca9a76ac2b0
| 4,215
|
py
|
Python
|
pushservice/src/Controller/SendLog.py
|
Haehnchen/enigma2-plugins
|
23007eb0b78665cd3a2faf98d1d6145b4f0ada3f
|
[
"OLDAP-2.3"
] | 1
|
2020-01-27T22:53:56.000Z
|
2020-01-27T22:53:56.000Z
|
pushservice/src/Controller/SendLog.py
|
Haehnchen/enigma2-plugins
|
23007eb0b78665cd3a2faf98d1d6145b4f0ada3f
|
[
"OLDAP-2.3"
] | null | null | null |
pushservice/src/Controller/SendLog.py
|
Haehnchen/enigma2-plugins
|
23007eb0b78665cd3a2faf98d1d6145b4f0ada3f
|
[
"OLDAP-2.3"
] | null | null | null |
#######################################################################
#
# Push Service for Enigma-2
# Coded by betonme (c) 2012 <glaserfrank(at)gmail.com>
# Support: http://www.i-have-a-dreambox.com/wbb2/thread.php?threadid=167779
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#######################################################################
# Config
from Components.config import ConfigYesNo, ConfigText, NoSave
# Plugin internal
from Plugins.Extensions.PushService.__init__ import _
from Plugins.Extensions.PushService.ControllerBase import ControllerBase
# Plugin specific
import os
import re
import fnmatch
# Constants
SUBJECT = _("Found Log(s)")
BODY = _("Log(s) are attached")
class SendLog(ControllerBase):
ForceSingleInstance = True
def __init__(self):
# Is called on instance creation
ControllerBase.__init__(self)
self.logfiles = []
# Default configuration
self.setOption( 'path', NoSave(ConfigText( default = "/media/hdd/", fixed_size = False )), _("Path to check") )
self.setOption( 'file_pattern', NoSave(ConfigText( default = "*.log", fixed_size = False )), _("Filename pattern (No RegExp)") )
self.setOption( 'content_pattern', NoSave(ConfigText( default = ".*", fixed_size = False )), _("Content pattern (RegExp)") )
self.setOption( 'scan_subs', NoSave(ConfigYesNo( default = False )), _("Scan subfolders") )
self.setOption( 'rename_logs', NoSave(ConfigYesNo( default = False )), _("Rename log(s)") )
self.setOption( 'delete_logs', NoSave(ConfigYesNo( default = False )), _("Delete log(s)") )
def run(self, callback, errback):
# At the end a plugin has to call one of the functions: callback or errback
# Callback should return with at least one of the parameter: Header, Body, Attachment
# If empty or none is returned, nothing will be sent
self.logfiles = []
path = self.getValue('path')
file_pattern = self.getValue('file_pattern')
content_pattern = self.getValue('content_pattern')
prog = re.compile(content_pattern)
if self.getValue('scan_subs'):
for root, dirnames, filenames in os.walk(path):
for filename in fnmatch.filter(filenames, file_pattern):
logfile = os.path.join( root, filename )
if( content_pattern == ".*" ):
self.logfiles.append( logfile )
else:
infile = open(logfile,"r")
for line in infile:
if prog.match(line):
self.logfiles.append( logfile )
break
infile.close()
else:
filenames = os.listdir( path )
for filename in fnmatch.filter(filenames, file_pattern):
logfile = os.path.join( path, filename )
if( content_pattern == ".*" ):
self.logfiles.append( logfile )
else:
infile = open(logfile,"r")
for line in infile:
if prog.match(line):
self.logfiles.append( logfile )
break
infile.close()
if self.logfiles:
callback( SUBJECT, BODY, self.logfiles )
else:
callback()
# Callback functions
def callback(self):
        # Called after all services succeeded
if self.getValue('delete_logs'):
# Delete logfiles
for logfile in self.logfiles[:]:
if os.path.exists( logfile ):
os.remove( logfile )
self.logfiles.remove( logfile )
elif self.getValue('rename_logs'):
# Rename logfiles to avoid resending it
for logfile in self.logfiles[:]:
if os.path.exists( logfile ):
# Adapted from autosubmit - instead of .sent we will use .pushed
currfilename = str(os.path.basename(logfile))
newfilename = "/media/hdd/" + currfilename + ".pushed"
os.rename(logfile,newfilename)
self.logfiles.remove( logfile )
def errback(self):
        # Called after all services have returned, but at least one has failed
self.logfiles = []
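# --- Standalone sketch of the matching logic used in run() (illustrative) --
# fnmatch filters by filename pattern and re filters by content, mirroring
# the non-recursive branch above without the Enigma2 plugin machinery.
def _find_logs(path, file_pattern="*.log", content_pattern=".*"):
    prog = re.compile(content_pattern)
    hits = []
    for filename in fnmatch.filter(os.listdir(path), file_pattern):
        logfile = os.path.join(path, filename)
        with open(logfile, "r") as infile:
            if content_pattern == ".*" or any(prog.match(line) for line in infile):
                hits.append(logfile)
    return hits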
| 36.025641
| 134
| 0.666192
|
e8650f43cea4946966bce8af3fd1e1a52c578c6e
| 4,931
|
py
|
Python
|
jobfunnel/config/settings.py
|
marchbnr/JobFunnel
|
446e9e06790467d6274e7e69cc505e7b3d982e03
|
[
"MIT"
] | 1,652
|
2019-07-13T14:41:37.000Z
|
2022-03-29T04:25:06.000Z
|
jobfunnel/config/settings.py
|
GlobalOpenSourceSociety/JobFunnel
|
45b3a8784313f1af1f21536df77c63868ffcdb7d
|
[
"MIT"
] | 124
|
2019-07-04T16:36:04.000Z
|
2022-03-22T19:52:58.000Z
|
jobfunnel/config/settings.py
|
GlobalOpenSourceSociety/JobFunnel
|
45b3a8784313f1af1f21536df77c63868ffcdb7d
|
[
"MIT"
] | 215
|
2019-11-12T01:07:24.000Z
|
2022-03-15T20:23:10.000Z
|
"""Settings YAML Schema w/ validator
"""
import ipaddress
from cerberus import Validator
from jobfunnel.resources import (LOG_LEVEL_NAMES, DelayAlgorithm, Locale,
Provider, Remoteness)
from jobfunnel.resources.defaults import *
SETTINGS_YAML_SCHEMA = {
'master_csv_file': {
'required': True,
'type': 'string',
},
'block_list_file': {
'required': True,
'type': 'string',
},
'cache_folder': {
'required': True,
'type': 'string',
},
'duplicates_list_file': {
'required': True,
'type': 'string',
},
'log_file': {
'required': True, # TODO: allow this to be optional
'type': 'string',
},
'no_scrape': {
'required': False, # TODO: we should consider making this CLI only
'type': 'boolean',
'default': False,
},
'log_level': {
'required': False,
'allowed': LOG_LEVEL_NAMES,
'default': DEFAULT_LOG_LEVEL_NAME,
},
'search': {
'type': 'dict',
'required': True,
'schema': {
'providers': {
'required': False,
'allowed': [p.name for p in Provider],
'default': DEFAULT_PROVIDERS,
},
'locale' : {
'required': True,
'allowed': [l.name for l in Locale],
},
'province_or_state': {'required': True, 'type': 'string'},
'city': {'required': True, 'type': 'string'},
'radius': {
'required': False,
'type': 'integer',
'min': 0,
'default': DEFAULT_SEARCH_RADIUS,
},
'similar_results': {
'required': False,
'type': 'boolean',
'default': DEFAULT_RETURN_SIMILAR_RESULTS,
},
'keywords': {
'required': True,
'type': 'list',
'schema': {'type': 'string'},
},
'max_listing_days': {
'required': False,
'type': 'integer',
'min': 0,
'default': DEFAULT_MAX_LISTING_DAYS,
},
'company_block_list': {
'required': False,
'type': 'list',
'schema': {'type': 'string'},
'default': DEFAULT_COMPANY_BLOCK_LIST,
},
'remoteness' : {
'required': False,
'type': 'string',
'allowed': [r.name for r in Remoteness],
'default': DEFAULT_REMOTENESS.name,
}
},
},
'delay': {
'type': 'dict',
'required': False,
'schema' : {
'algorithm': {
'required': False,
'allowed': [d.name for d in DelayAlgorithm],
'default': DEFAULT_DELAY_ALGORITHM.name,
},
# TODO: implement custom rule max > min
'max_duration': {
'required': False,
'type': 'float',
'min': 0,
'default': DEFAULT_DELAY_MAX_DURATION,
},
'min_duration': {
'required': False,
'type': 'float',
'min': 0,
'default': DEFAULT_DELAY_MIN_DURATION,
},
'random': {
'required': False,
'type': 'boolean',
'default': DEFAULT_RANDOM_DELAY,
},
'converging': {
'required': False,
'type': 'boolean',
'default': DEFAULT_RANDOM_CONVERGING_DELAY,
},
},
},
'proxy': {
'type': 'dict',
'required': False,
'schema' : {
'protocol': {
'required': False,
'allowed': ['http', 'https'],
},
'ip': {
'required': False,
'type': 'ipv4address',
},
'port': {
'required': False,
'type': 'integer',
'min': 0,
},
},
},
}
class JobFunnelSettingsValidator(Validator):
"""A simple JSON data validator with a custom data type for IPv4 addresses
https://codingnetworker.com/2016/03/validate-json-data-using-cerberus/
"""
def _validate_type_ipv4address(self, value):
"""
checks that the given value is a valid IPv4 address
"""
try:
# try to create an IPv4 address object using the python3 ipaddress
# module
ipaddress.IPv4Address(value)
return True
except:
self._error(value, "Not a valid IPv4 address")
SettingsValidator = JobFunnelSettingsValidator(SETTINGS_YAML_SCHEMA)
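# --- Illustrative sketch: exercising only the custom 'ipv4address' type ----
# A full settings document also needs the required top-level keys declared in
# SETTINGS_YAML_SCHEMA; this sub-schema check is just a minimal example.
if __name__ == "__main__":
    _proxy_validator = JobFunnelSettingsValidator(
        {'ip': {'type': 'ipv4address'}, 'port': {'type': 'integer', 'min': 0}})
    assert _proxy_validator.validate({'ip': '192.168.0.1', 'port': 8080})
    assert not _proxy_validator.validate({'ip': 'not-an-ip', 'port': 8080})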
| 29.177515
| 78
| 0.446765
|
e6dfdbaf9b479f43a1ffb394ae244ee0af207de6
| 1,066
|
py
|
Python
|
examples/NAMD/analysis/FKBP_test/milestone_10/analysis.py
|
dhimanray/WEMRR
|
aab019f1d1bb4d6db6dea36f9444167591129322
|
[
"BSD-3-Clause"
] | null | null | null |
examples/NAMD/analysis/FKBP_test/milestone_10/analysis.py
|
dhimanray/WEMRR
|
aab019f1d1bb4d6db6dea36f9444167591129322
|
[
"BSD-3-Clause"
] | null | null | null |
examples/NAMD/analysis/FKBP_test/milestone_10/analysis.py
|
dhimanray/WEMRR
|
aab019f1d1bb4d6db6dea36f9444167591129322
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
#get_ipython().magic(u'matplotlib inline')
from matplotlib import pyplot as plt
import numpy as np
from wemrr import *
np.set_printoptions(threshold=np.inf)
import w_ipa
w = w_ipa.WIPI()
# At startup, it will load or run the analysis schemes specified in the configuration file (typically west.cfg)
w.main()
#w.interface = 'matplotlib'
#========================================================
#USER DEFINED PARAMETERS
#========================================================
itex = 10 #number of iterations to exclude for harmonic constrained simulation
dt = 0.20 #frequency at which progress coordinate is saved (in realistic unit like ps)
tau = 11 #number of progress coordinate values saved per iteration + 1
forward_milestone_position = 26.00
backward_milestone_position = 22.00
#=========================================================
#Compute transition statistics
#=========================================================
transitions_intermediate_milestone(w,itex,dt,tau,forward_milestone_position,backward_milestone_position)
| 36.758621
| 111
| 0.62758
|
2ace2bd005703f3a86c19590c1270cedbb48bd72
| 268
|
py
|
Python
|
mungo/__init__.py
|
Sniedes722/motorhead
|
3f53508179a97f04a5ecadb078cc99149be08a7e
|
[
"MIT"
] | 1
|
2017-11-14T06:44:49.000Z
|
2017-11-14T06:44:49.000Z
|
mungo/__init__.py
|
Sniedes722/motorhead
|
3f53508179a97f04a5ecadb078cc99149be08a7e
|
[
"MIT"
] | null | null | null |
mungo/__init__.py
|
Sniedes722/motorhead
|
3f53508179a97f04a5ecadb078cc99149be08a7e
|
[
"MIT"
] | null | null | null |
from ctypes import cdll
lib = cdll.LoadLibrary('./mungo/build/mungo.so')
class MungoArray:
def __init__(self):
self.mgArray = []
def addArray(self,num1,num2):
self.arraySum = lib.add(num1,num2)
return self.arraySum
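# Note (illustrative): for anything beyond plain int arguments it is safer to
# declare the C signature explicitly, e.g.
#   import ctypes
#   lib.add.argtypes = [ctypes.c_int, ctypes.c_int]
#   lib.add.restype = ctypes.c_int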
| 19.142857
| 48
| 0.619403
|
fd4961550be9940b56272fd1eac19e9018c0f242
| 14,039
|
py
|
Python
|
examples/seismic/model.py
|
rhodrin/devito
|
cd1ae745272eb0315aa1c36038a3174f1817e0d0
|
[
"MIT"
] | null | null | null |
examples/seismic/model.py
|
rhodrin/devito
|
cd1ae745272eb0315aa1c36038a3174f1817e0d0
|
[
"MIT"
] | null | null | null |
examples/seismic/model.py
|
rhodrin/devito
|
cd1ae745272eb0315aa1c36038a3174f1817e0d0
|
[
"MIT"
] | null | null | null |
import numpy as np
from sympy import sin, Abs
from devito import (Grid, SubDomain, Function, Constant,
SubDimension, Eq, Inc, Operator)
from devito.builtins import initialize_function, gaussian_smooth, mmax
from devito.tools import as_tuple
__all__ = ['Model', 'ModelElastic', 'ModelViscoelastic']
def initialize_damp(damp, nbl, spacing, mask=False):
"""
Initialise damping field with an absorbing boundary layer.
Parameters
----------
damp : Function
The damping field for absorbing boundary condition.
nbl : int
Number of points in the damping layer.
spacing :
Grid spacing coefficient.
mask : bool, optional
whether the dampening is a mask or layer.
mask => 1 inside the domain and decreases in the layer
not mask => 0 inside the domain and increase in the layer
"""
dampcoeff = 1.5 * np.log(1.0 / 0.001) / (nbl)
eqs = [Eq(damp, 1.0)] if mask else []
for d in damp.dimensions:
# left
dim_l = SubDimension.left(name='abc_%s_l' % d.name, parent=d,
thickness=nbl)
pos = Abs((nbl - (dim_l - d.symbolic_min) + 1) / float(nbl))
val = dampcoeff * (pos - sin(2*np.pi*pos)/(2*np.pi))
val = -val if mask else val
eqs += [Inc(damp.subs({d: dim_l}), val/d.spacing)]
# right
dim_r = SubDimension.right(name='abc_%s_r' % d.name, parent=d,
thickness=nbl)
pos = Abs((nbl - (d.symbolic_max - dim_r) + 1) / float(nbl))
val = dampcoeff * (pos - sin(2*np.pi*pos)/(2*np.pi))
val = -val if mask else val
eqs += [Inc(damp.subs({d: dim_r}), val/d.spacing)]
Operator(eqs, name='initdamp')()
class PhysicalDomain(SubDomain):
name = 'phydomain'
def __init__(self, nbl):
super(PhysicalDomain, self).__init__()
self.nbl = nbl
def define(self, dimensions):
return {d: ('middle', self.nbl, self.nbl) for d in dimensions}
class GenericModel(object):
"""
General model class with common properties
"""
def __init__(self, origin, spacing, shape, space_order, nbl=20,
dtype=np.float32, subdomains=(), damp_mask=False):
self.shape = shape
self.nbl = int(nbl)
self.origin = tuple([dtype(o) for o in origin])
# Origin of the computational domain with boundary to inject/interpolate
# at the correct index
origin_pml = tuple([dtype(o - s*nbl) for o, s in zip(origin, spacing)])
phydomain = PhysicalDomain(self.nbl)
subdomains = subdomains + (phydomain, )
shape_pml = np.array(shape) + 2 * self.nbl
# Physical extent is calculated per cell, so shape - 1
extent = tuple(np.array(spacing) * (shape_pml - 1))
self.grid = Grid(extent=extent, shape=shape_pml, origin=origin_pml, dtype=dtype,
subdomains=subdomains)
if self.nbl != 0:
# Create dampening field as symbol `damp`
self.damp = Function(name="damp", grid=self.grid)
initialize_damp(self.damp, self.nbl, self.spacing, mask=damp_mask)
self._physical_parameters = ['damp']
else:
self.damp = 1 if damp_mask else 0
self._physical_parameters = []
def physical_params(self, **kwargs):
"""
Return all set physical parameters and update to input values if provided
"""
known = [getattr(self, i) for i in self.physical_parameters]
return {i.name: kwargs.get(i.name, i) or i for i in known}
def _gen_phys_param(self, field, name, space_order, is_param=False,
default_value=0):
if field is None:
return default_value
if isinstance(field, np.ndarray):
function = Function(name=name, grid=self.grid, space_order=space_order,
parameter=is_param)
initialize_function(function, field, self.nbl)
else:
function = Constant(name=name, value=field)
self._physical_parameters.append(name)
return function
@property
def physical_parameters(self):
return as_tuple(self._physical_parameters)
@property
def dim(self):
"""
Spatial dimension of the problem and model domain.
"""
return self.grid.dim
@property
def spacing(self):
"""
Grid spacing for all fields in the physical model.
"""
return self.grid.spacing
@property
def space_dimensions(self):
"""
Spatial dimensions of the grid
"""
return self.grid.dimensions
@property
def spacing_map(self):
"""
Map between spacing symbols and their values for each `SpaceDimension`.
"""
return self.grid.spacing_map
@property
def dtype(self):
"""
        Data type for all associated data objects.
"""
return self.grid.dtype
@property
def domain_size(self):
"""
Physical size of the domain as determined by shape and spacing
"""
return tuple((d-1) * s for d, s in zip(self.shape, self.spacing))
class Model(GenericModel):
"""
The physical model used in seismic inversion processes.
Parameters
----------
origin : tuple of floats
Origin of the model in m as a tuple in (x,y,z) order.
spacing : tuple of floats
Grid size in m as a Tuple in (x,y,z) order.
shape : tuple of int
Number of grid points size in (x,y,z) order.
space_order : int
Order of the spatial stencil discretisation.
vp : array_like or float
Velocity in km/s.
nbl : int, optional
        The number of absorbing layers for boundary damping.
dtype : np.float32 or np.float64
Defaults to 32.
epsilon : array_like or float, optional
Thomsen epsilon parameter (0<epsilon<1).
delta : array_like or float
Thomsen delta parameter (0<delta<1), delta<epsilon.
theta : array_like or float
        Tilt angle in radians.
    phi : array_like or float
        Azimuth angle in radians.
The `Model` provides two symbolic data objects for the
creation of seismic wave propagation operators:
m : array_like or float
The square slowness of the wave.
damp : Function
The damping field for absorbing boundary condition.
"""
def __init__(self, origin, spacing, shape, space_order, vp, nbl=20,
dtype=np.float32, epsilon=None, delta=None, theta=None, phi=None,
subdomains=(), **kwargs):
super(Model, self).__init__(origin, spacing, shape, space_order, nbl, dtype,
subdomains)
# Create square slowness of the wave as symbol `m`
self._vp = self._gen_phys_param(vp, 'vp', space_order)
# Additional parameter fields for TTI operators
self.epsilon = self._gen_phys_param(epsilon, 'epsilon', space_order)
self.scale = 1 if epsilon is None else np.sqrt(1 + 2 * np.max(epsilon))
self.delta = self._gen_phys_param(delta, 'delta', space_order)
self.theta = self._gen_phys_param(theta, 'theta', space_order)
if self.grid.dim > 2:
self.phi = self._gen_phys_param(phi, 'phi', space_order)
@property
def _max_vp(self):
return mmax(self.vp)
@property
def critical_dt(self):
"""
Critical computational time step value from the CFL condition.
"""
# For a fixed time order this number decreases as the space order increases.
#
        # The CFL condition is then given by
# dt <= coeff * h / (max(velocity))
coeff = 0.38 if len(self.shape) == 3 else 0.42
dt = self.dtype(coeff * np.min(self.spacing) / (self.scale*self._max_vp))
return self.dtype("%.3e" % dt)
@property
def vp(self):
"""
`numpy.ndarray` holding the model velocity in km/s.
Notes
-----
Updating the velocity field also updates the square slowness
``self.m``. However, only ``self.m`` should be used in seismic
operators, since it is of type `Function`.
"""
return self._vp
@vp.setter
def vp(self, vp):
"""
Set a new velocity model and update square slowness.
Parameters
----------
vp : float or array
New velocity in km/s.
"""
# Update the square slowness according to new value
if isinstance(vp, np.ndarray):
if vp.shape == self.vp.shape:
self.vp.data[:] = vp[:]
elif vp.shape == self.shape:
initialize_function(self._vp, vp, self.nbl)
else:
raise ValueError("Incorrect input size %s for model of size" % vp.shape +
" %s without or %s with padding" % (self.shape,
self.vp.shape))
else:
self._vp.data = vp
@property
def m(self):
return 1 / (self.vp * self.vp)
def smooth(self, physical_parameters, sigma=5.0):
"""
Apply devito.gaussian_smooth to model physical parameters.
Parameters
----------
physical_parameters : string or tuple of string
Names of the fields to be smoothed.
sigma : float
Standard deviation of the smoothing operator.
"""
model_parameters = self.physical_params()
for i in physical_parameters:
gaussian_smooth(model_parameters[i], sigma=sigma)
return
class ModelElastic(GenericModel):
"""
The physical model used in seismic inversion processes.
Parameters
----------
origin : tuple of floats
Origin of the model in m as a tuple in (x,y,z) order.
spacing : tuple of floats, optional
Grid size in m as a Tuple in (x,y,z) order.
shape : tuple of int
Number of grid points size in (x,y,z) order.
space_order : int
Order of the spatial stencil discretisation.
vp : float or array
P-wave velocity in km/s.
vs : float or array
S-wave velocity in km/s.
nbl : int, optional
The number of absorbing layers for boundary damping.
rho : float or array, optional
        Density in g/cm^3 (rho=1 for water).
    The `ModelElastic` provides symbolic data objects for the
creation of seismic wave propagation operators:
damp : Function, optional
The damping field for absorbing boundary condition.
"""
def __init__(self, origin, spacing, shape, space_order, vp, vs, rho, nbl=20,
subdomains=(), dtype=np.float32):
super(ModelElastic, self).__init__(origin, spacing, shape, space_order,
nbl=nbl, subdomains=subdomains, dtype=dtype,
damp_mask=True)
self.maxvp = np.max(vp)
self.lam = self._gen_phys_param((vp**2 - 2. * vs**2)*rho, 'lam', space_order,
is_param=True)
self.mu = self._gen_phys_param(vs**2 * rho, 'mu', space_order, is_param=True)
self.irho = self._gen_phys_param(1./rho, 'irho', space_order, is_param=True)
@property
def critical_dt(self):
"""
Critical computational time step value from the CFL condition.
"""
# For a fixed time order this number decreases as the space order increases.
#
        # The CFL condition is then given by
# dt < h / (sqrt(ndim) * max(vp)))
dt = .85*np.min(self.spacing) / (np.sqrt(3.)*self.maxvp)
return self.dtype("%.3e" % dt)
class ModelViscoelastic(ModelElastic):
"""
The physical model used in seismic inversion processes.
Parameters
----------
origin : tuple of floats
Origin of the model in m as a tuple in (x,y,z) order.
spacing : tuple of floats, optional
Grid size in m as a Tuple in (x,y,z) order.
shape : tuple of int
Number of grid points size in (x,y,z) order.
space_order : int
Order of the spatial stencil discretisation.
vp : float or array
P-wave velocity in km/s.
qp : float or array
P-wave quality factor (dimensionless).
vs : float or array
S-wave velocity in km/s.
qs : float or array
        S-wave quality factor (dimensionless).
nbl : int, optional
The number of absorbing layers for boundary damping.
rho : float or array, optional
        Density in g/cm^3 (rho=1 for water).
    The `ModelViscoelastic` provides symbolic data objects for the
creation of seismic wave propagation operators:
damp : Function, optional
The damping field for absorbing boundary condition.
"""
def __init__(self, origin, spacing, shape, space_order, vp, qp, vs, qs, rho,
nbl=20, subdomains=(), dtype=np.float32):
super(ModelViscoelastic, self).__init__(origin, spacing, shape, space_order,
vp, vs, rho, nbl=nbl,
subdomains=subdomains, dtype=dtype)
self.qp = self._gen_phys_param(qp, 'qp', space_order, is_param=True)
self.qs = self._gen_phys_param(qs, 'qs', space_order, is_param=True)
@property
def critical_dt(self):
"""
Critical computational time step value from the CFL condition.
"""
# For a fixed time order this number decreases as the space order increases.
# See Blanch, J. O., 1995, "A study of viscous effects in seismic modelling,
# imaging, and inversion: methodology, computational aspects and sensitivity"
# for further details:
dt = .85*np.min(self.spacing) / (np.sqrt(self.grid.dim)*self.maxvp)
return self.dtype("%.3e" % dt)
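A minimal usage sketch for the classes above (not part of the original file; the values are illustrative only and it assumes the devito examples package shown here is importable):
import numpy as np
from examples.seismic.model import Model

shape = (101, 101)                          # grid points in (x, z)
spacing = (10.0, 10.0)                      # grid spacing in m
origin = (0.0, 0.0)
vp = np.full(shape, 1.5, dtype=np.float32)  # 1.5 km/s background velocity
vp[:, 51:] = 2.5                            # faster layer below 510 m depth

model = Model(origin=origin, spacing=spacing, shape=shape,
              space_order=4, vp=vp, nbl=20)
print(model.critical_dt)                    # stable time step from the CFL condition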
| 35.0975
| 89
| 0.593917
|
a17ef349bae2a8bea09c1d41f071c2f5f98df6d5
| 1,606
|
py
|
Python
|
.history/my_classes/FunctionParameters/default_values_20210702203211.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FunctionParameters/default_values_20210702203211.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FunctionParameters/default_values_20210702203211.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
"""[Default values]
What happens at run time...
When modules are loaded: All the code is executed immediately.
Module Code
a = 10 the integer object 10 is created and a references it.
def func(a): the function object is created, and func references it.
print(a)
func(a) the function is executed
What about default values?
Module code
def func(a=10): the function object is created, and func references it
print(a) the integer object 10 is evaluated/created and is assigned as the default value for a
func() the function is executed
by the time this happens, the default value for a has already been evaluated and assigned - it is not re-evaluated when the function is called
So what?
Consider this:
We want to create a function that will write a log entry to the console with a user-specified event date/time. If the user does not supply a date/time, we want to set it to the current date/time.
from datetime import datetime
def log(msg, *, dt=datetime.utcnow()):
print('{0}: {1}'.format(dt, msg))
log('message 1') -> 2017-08-21 20:54:37.706994 : message 1
a few minutes later
log('message 2') -> 2017-08-21 20:54:37.706994 : message 2 ## note the same timestamp is shown as above.
Solution pattern: the logged time needs to be the current time at which the call is executed
We set a default for dt to None
If dt is None, set it to current date/time
otherwise, use what the caller specified for datetime
from datetime import datetime
"""
| 28.678571
| 195
| 0.662516
|
e35f69981ba3366e9e6802d6dbc127b76b9b8a54
| 3,618
|
py
|
Python
|
vissl/hooks/swav_hooks.py
|
blazejdolicki/vissl
|
9c10748a19fb1c637f32687142c8cd685f2410ff
|
[
"MIT"
] | 2,512
|
2021-01-27T18:44:44.000Z
|
2022-03-31T19:33:49.000Z
|
vissl/hooks/swav_hooks.py
|
blazejdolicki/vissl
|
9c10748a19fb1c637f32687142c8cd685f2410ff
|
[
"MIT"
] | 361
|
2021-01-27T20:12:09.000Z
|
2022-03-31T12:39:34.000Z
|
vissl/hooks/swav_hooks.py
|
blazejdolicki/vissl
|
9c10748a19fb1c637f32687142c8cd685f2410ff
|
[
"MIT"
] | 277
|
2021-01-29T08:09:02.000Z
|
2022-03-31T07:57:35.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import torch
import torch.nn as nn
from classy_vision import tasks
from classy_vision.hooks.classy_hook import ClassyHook
class SwAVUpdateQueueScoresHook(ClassyHook):
"""
Update queue scores, useful with small batches and helps getting
meaningful gradients.
"""
on_start = ClassyHook._noop
on_phase_start = ClassyHook._noop
on_loss_and_meter = ClassyHook._noop
on_backward = ClassyHook._noop
on_update = ClassyHook._noop
on_step = ClassyHook._noop
on_phase_end = ClassyHook._noop
on_end = ClassyHook._noop
def on_forward(self, task) -> None:
"""
If we want to use queue in SwAV training,
update the queue scores after every forward.
"""
if not task.config["LOSS"]["name"] == "swav_loss":
return
if not task.loss.swav_criterion.use_queue:
return
try:
task.loss.swav_criterion.compute_queue_scores(task.model.heads[0])
except AttributeError:
task.loss.swav_criterion.compute_queue_scores(task.model.module.heads[0])
class NormalizePrototypesHook(ClassyHook):
"""
L2 Normalize the prototypes in swav training. Optional.
"""
on_start = ClassyHook._noop
on_phase_start = ClassyHook._noop
on_forward = ClassyHook._noop
on_loss_and_meter = ClassyHook._noop
on_backward = ClassyHook._noop
on_phase_end = ClassyHook._noop
on_end = ClassyHook._noop
on_step = ClassyHook._noop
def on_update(self, task: "tasks.ClassyTask") -> None:
"""
Optionally normalize prototypes
"""
if not task.config["LOSS"]["name"] == "swav_loss":
return
if not task.config.LOSS["swav_loss"].normalize_last_layer:
return
with torch.no_grad():
try:
# This is either single GPU model or a FSDP.
assert len(task.model.heads) == 1
for j in range(task.model.heads[0].nmb_heads):
module = getattr(task.model.heads[0], "prototypes" + str(j))
# Determine the context we need to use. For FSDP, we
# need the summon_full_params context, which ensures that
# full weights for this layer is all_gathered and after
# normalization, the changes are persisted in the local
# shards. All ranks do the same normalization, so all
# changes should be saved.
ctx = contextlib.suppress()
if hasattr(module, "summon_full_params"):
ctx = module.summon_full_params()
with ctx:
w = module.weight.data.clone()
w = nn.functional.normalize(w, dim=1, p=2)
module.weight.copy_(w)
except AttributeError:
# This is a DDP wrapped one.
assert len(task.model.module.heads) == 1
for j in range(task.model.module.heads[0].nmb_heads):
w = getattr(
task.model.module.heads[0], "prototypes" + str(j)
).weight.data.clone()
w = nn.functional.normalize(w, dim=1, p=2)
getattr(
task.model.module.heads[0], "prototypes" + str(j)
).weight.copy_(w)
| 37.6875
| 85
| 0.590934
|
fd37b121c8ae84c4549baa98f91c37138456f744
| 2,720
|
py
|
Python
|
venv/Scripts/alpaca.py
|
rexliu3/StockTradingBotCloud
|
46b732b9c05f73bc0e856a3c4a16854b6d12e18e
|
[
"MIT"
] | null | null | null |
venv/Scripts/alpaca.py
|
rexliu3/StockTradingBotCloud
|
46b732b9c05f73bc0e856a3c4a16854b6d12e18e
|
[
"MIT"
] | null | null | null |
venv/Scripts/alpaca.py
|
rexliu3/StockTradingBotCloud
|
46b732b9c05f73bc0e856a3c4a16854b6d12e18e
|
[
"MIT"
] | 1
|
2020-06-28T11:47:47.000Z
|
2020-06-28T11:47:47.000Z
|
#!C:\Users\rexli\Desktop\Commonly Used\StockTradingBot\venv\Scripts\python.exe
"""
alpaca.py
Alpaca
"""
import sys
from alpacalib.regex import Regex
from alpacalib.regex import RegexError
from alpacalib.nfa import NFA
from alpacalib.dfa import DFA
from alpacalib.dot import Dot
from alpacalib.options import Options
from alpacalib.options import OptionsError
from alpacalib.input import Input
from alpacalib.input import InputError
from alpacalib.output import Output
from alpacalib.output import OutputError
class Alpaca:
def main():
try:
Options.parse(sys.argv[1:])
except OptionsError as err:
print(err.args[0])
print()
Alpaca.show_help()
return
if Options.is_show_help():
Alpaca.show_help()
return
if Options.is_show_version():
Alpaca.show_version()
return
Alpaca.translate_regex()
def translate_regex():
texts = None
try:
texts = Input.get_regexes()
except InputError as err:
print(err.args[0], file=sys.stderr)
return
nfas = []
for i in range(len(texts)):
try:
regex = Regex(texts[i])
nfas.append(regex.transform())
except RegexError as err:
print('SyntaxError(%d): %s' % (i+1, err.args[0]))
return
nfa = NFA()
nfa.merge(nfas)
dfa = nfa.transform()
dfa.minimize()
dot = dfa.transform()
try:
Output.output_script(dot.script)
except OutputError as err:
print(err.args[0], file=sys.stderr)
return
def show_version():
print('alpaca.py 1.0.0')
print('Copyright (C) 2013 activesys.wb@gmail.com')
def show_help():
print('USAGE')
print(' alpaca.py [OPTION] [input-file]')
print()
print('DESCRIPTION')
print(' alpaca.py translate regular expression to DFA, and output the DFA as DOT format.')
print()
print('OPTION')
print('-o output-file')
print('--output=output-file')
print(' write output to \'output-file\', write output to stdout when this option not present.')
print('-h')
print('--help')
print(' show this usage and exit.')
print('-V')
print('--version')
print(' show copyright and version message and exit.')
print()
print('input-file')
print(' read input from \'input-file\', read input from stdin when \'input-file\' not present.')
print()
if __name__ == '__main__':
Alpaca.main()
| 27.474747
| 107
| 0.575
|
88900cf562896d235651893abbe2336d1dba421d
| 1,411
|
py
|
Python
|
setup.py
|
Archstacker/thefuck
|
ebe53f0d181c28ec2f7a86f46d7d51a7d48bbd9e
|
[
"MIT"
] | null | null | null |
setup.py
|
Archstacker/thefuck
|
ebe53f0d181c28ec2f7a86f46d7d51a7d48bbd9e
|
[
"MIT"
] | null | null | null |
setup.py
|
Archstacker/thefuck
|
ebe53f0d181c28ec2f7a86f46d7d51a7d48bbd9e
|
[
"MIT"
] | 1
|
2021-06-21T09:01:08.000Z
|
2021-06-21T09:01:08.000Z
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
import os
if os.environ.get('CONVERT_README'):
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
else:
long_description = ''
version = sys.version_info[:2]
if version < (2, 7):
print('thefuck requires Python version 2.7 or later' +
' ({}.{} detected).'.format(*version))
sys.exit(-1)
elif (3, 0) < version < (3, 3):
print('thefuck requires Python version 3.3 or later' +
' ({}.{} detected).'.format(*version))
sys.exit(-1)
VERSION = '2.8'
install_requires = ['psutil', 'colorama', 'six', 'decorator']
extras_require = {':python_version<"3.4"': ['pathlib']}
setup(name='thefuck',
version=VERSION,
description="Magnificent app which corrects your previous console command",
long_description=long_description,
author='Vladimir Iakovlev',
author_email='nvbn.rm@gmail.com',
url='https://github.com/nvbn/thefuck',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples',
'tests', 'release']),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require=extras_require,
entry_points={'console_scripts': [
'thefuck = thefuck.main:main',
'thefuck-alias = thefuck.main:print_alias']})
| 31.355556
| 81
| 0.638554
|
b17759e697e4be2ad2bddf87e04a29ddf00c885f
| 196
|
py
|
Python
|
nsd1902/devops/day01/zb.py
|
MrWangwf/nsd2019
|
5e859b4b1926dc098d236be3720779c50d0a55fc
|
[
"Apache-2.0"
] | 1
|
2019-09-19T04:53:22.000Z
|
2019-09-19T04:53:22.000Z
|
nsd1902/devops/day01/zb.py
|
MrWangwf/nsd2019
|
5e859b4b1926dc098d236be3720779c50d0a55fc
|
[
"Apache-2.0"
] | null | null | null |
nsd1902/devops/day01/zb.py
|
MrWangwf/nsd2019
|
5e859b4b1926dc098d236be3720779c50d0a55fc
|
[
"Apache-2.0"
] | 1
|
2021-12-28T04:26:02.000Z
|
2021-12-28T04:26:02.000Z
|
import os
import time
print('starting...')
retval = os.fork()
if retval:
    print('parent process')  # parent keeps sleeping without wait(), so the exited child becomes a zombie
time.sleep(60)
else:
    print('child process')  # child exits after 15s; observe it as <defunct> with ps
time.sleep(15)
exit()
print('done')
# watch -n1 ps a
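A minimal follow-up sketch (not part of the original file): the parent can reap the exited child with os.waitpid so it does not linger as a <defunct> (zombie) entry in ps.
import os
import time

pid = os.fork()
if pid:                                 # parent
    time.sleep(20)                      # by now the child has exited
    _, status = os.waitpid(pid, 0)      # reap the child; the zombie entry disappears
    print('child reaped, exit status', os.WEXITSTATUS(status))
    time.sleep(40)
else:                                   # child
    time.sleep(15)
    exit()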
| 11.529412
| 20
| 0.591837
|
8ae630248a301fed08c2d6515384cbd43f160827
| 2,986
|
py
|
Python
|
services/simulation/periodictasks.py
|
rtubio/server
|
3bb15f4d4dcd543d6f95d1fda2cb737de0bb9a9b
|
[
"Apache-2.0"
] | 4
|
2015-03-23T16:34:53.000Z
|
2017-12-12T11:41:54.000Z
|
services/simulation/periodictasks.py
|
rtubio/server
|
3bb15f4d4dcd543d6f95d1fda2cb737de0bb9a9b
|
[
"Apache-2.0"
] | 42
|
2015-01-08T22:21:04.000Z
|
2021-12-13T19:48:44.000Z
|
services/simulation/periodictasks.py
|
rtubio/server
|
3bb15f4d4dcd543d6f95d1fda2cb737de0bb9a9b
|
[
"Apache-2.0"
] | 2
|
2015-04-04T15:23:35.000Z
|
2017-07-23T23:14:06.000Z
|
"""
Copyright 2013, 2014 Ricardo Tubio-Pardavila
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'rtubiopa@calpoly.edu'
import logging
from periodically import decorators
from services.common import misc
from services.simulation.models import groundtracks as gt_models
from services.simulation.models import passes as pass_models
logger = logging.getLogger('simulation')
@decorators.daily()
def propagate_groundtracks():
"""Periodic groundtracks propagation
"""
logger.info('>>> Populating groundtracks')
try:
gt_models.GroundTrack.objects.propagate()
except Exception as ex:
logger.exception(
            '>>> Exception populating groundtracks, ex = ' + str(ex)
)
return
logger.info('>>> DONE propagating groundtracks')
@decorators.daily()
def clean_groundtracks(threshold=misc.get_now_utc()):
"""Periodic groundtracks cleanup
@param threshold: datetime threshold to clean the old groundtracks
"""
logger.info('>>> Cleaning groundtracks')
try:
no_deleted = gt_models.GroundTrack.objects.delete_older(
threshold
).delete()
logger.debug('>>> tasks@clean_passes.filtered = ' + str(no_deleted))
except Exception as ex:
logger.exception(
            '>>> Exception cleaning groundtracks, ex = ' + str(ex)
)
return
logger.info('>>> DONE cleaning groundtracks')
@decorators.daily()
def propagate_passes():
"""Periodic groundtracks propagation
"""
logger.info('>>> Propagating passes')
try:
pass_models.PassSlots.objects.propagate()
except Exception as ex:
        logger.warning('>>> Exception propagating passes, ex = ' + str(ex))
return
logger.info('>>> DONE propagating groundtracks')
@decorators.daily()
def clean_passes(threshold=misc.get_now_utc()):
"""Periodic groundtracks cleanup
Cleans the outdated passes from the database.
@param threshold: datetime threshold to clean the old passes
"""
logger.info('>>> Cleaning passes, threshold = ' + str(threshold))
try:
no_deleted = pass_models.PassSlots.objects.filter(
end__lte=threshold
).delete()
logger.debug('>>> tasks@clean_passes.filtered = ' + str(no_deleted))
except Exception as ex:
        logger.exception('>>> Exception cleaning passes, ex = ' + str(ex))
return
logger.info('>>> DONE cleaning passes')
| 28.711538
| 79
| 0.679839
|
9b44732bf8fa8e951bf6b19f0f397080624c2ade
| 1,064
|
py
|
Python
|
nereid/nereid/api/api_v1/endpoints/reference_data.py
|
austinorr/nereid
|
5e4b19d6d1a57f1e514b775ca1b3851ddf8eb7f7
|
[
"BSD-3-Clause"
] | null | null | null |
nereid/nereid/api/api_v1/endpoints/reference_data.py
|
austinorr/nereid
|
5e4b19d6d1a57f1e514b775ca1b3851ddf8eb7f7
|
[
"BSD-3-Clause"
] | 3
|
2020-01-11T21:51:16.000Z
|
2020-02-18T16:11:34.000Z
|
nereid/nereid/api/api_v1/endpoints/reference_data.py
|
austinorr/nereid
|
5e4b19d6d1a57f1e514b775ca1b3851ddf8eb7f7
|
[
"BSD-3-Clause"
] | 1
|
2019-04-23T18:25:25.000Z
|
2019-04-23T18:25:25.000Z
|
import ujson as json
from typing import Dict, Any
from fastapi import APIRouter, HTTPException, Depends
from nereid.api.api_v1.models.reference_models import ReferenceDataResponse
from nereid.api.api_v1.utils import get_valid_context
from nereid.core.io import load_json
router = APIRouter()
@router.get(
"/reference_data", tags=["reference_data"], response_model=ReferenceDataResponse
)
async def get_reference_data_json(
context: dict = Depends(get_valid_context), filename: str = ""
) -> Dict[str, Any]:
filepath = ""
filepath = f"{context.get('data_path', '')}/{filename}.json"
state, region = context["state"], context["region"]
try:
filedata = load_json(filepath)
except FileNotFoundError as e:
detail = f"state '{state}', region '{region}', or filename '{filename}' not found. {filepath}"
raise HTTPException(status_code=400, detail=detail)
response = dict(
status="SUCCESS",
data=dict(state=state, region=region, file=filename, filedata=filedata),
)
return response
| 28.756757
| 102
| 0.706767
|
ca943ddefebda9644f69a01d77596f19e662cec6
| 83,358
|
py
|
Python
|
zerver/lib/bugdown/__init__.py
|
shreyanshdwivedi/zulip
|
fe39ad04e191c4d0d4a4b54fd94529a9df9f72ae
|
[
"Apache-2.0"
] | null | null | null |
zerver/lib/bugdown/__init__.py
|
shreyanshdwivedi/zulip
|
fe39ad04e191c4d0d4a4b54fd94529a9df9f72ae
|
[
"Apache-2.0"
] | null | null | null |
zerver/lib/bugdown/__init__.py
|
shreyanshdwivedi/zulip
|
fe39ad04e191c4d0d4a4b54fd94529a9df9f72ae
|
[
"Apache-2.0"
] | null | null | null |
# Zulip's main markdown implementation. See docs/subsystems/markdown.md for
# detailed documentation on our markdown syntax.
from typing import (Any, Callable, Dict, Iterable, List, NamedTuple,
Optional, Set, Tuple, TypeVar, Union, cast)
from mypy_extensions import TypedDict
from typing.re import Match
import markdown
import logging
import traceback
import urllib
import re
import os
import html
import platform
import time
import functools
import ujson
import xml.etree.cElementTree as etree
from xml.etree.cElementTree import Element, SubElement
from collections import deque, defaultdict
import requests
from django.core import mail
from django.conf import settings
from django.db.models import Q
from markdown.extensions import codehilite
from zerver.lib.bugdown import fenced_code
from zerver.lib.bugdown.fenced_code import FENCE_RE
from zerver.lib.camo import get_camo_url
from zerver.lib.emoji import translate_emoticons, emoticon_regex
from zerver.lib.mention import possible_mentions, \
possible_user_group_mentions, extract_user_group
from zerver.lib.url_encoding import encode_stream
from zerver.lib.thumbnail import is_thumbor_enabled
from zerver.lib.timeout import timeout, TimeoutExpired
from zerver.lib.cache import cache_with_key, NotFoundInCache
from zerver.lib.url_preview import preview as link_preview
from zerver.models import (
all_realm_filters,
get_active_streams,
MAX_MESSAGE_LENGTH,
Message,
Realm,
RealmFilter,
realm_filters_for_realm,
UserProfile,
UserGroup,
UserGroupMembership,
)
import zerver.lib.mention as mention
from zerver.lib.tex import render_tex
from zerver.lib.exceptions import BugdownRenderingException
from zerver.lib.bugdown import arguments
FullNameInfo = TypedDict('FullNameInfo', {
'id': int,
'email': str,
'full_name': str,
})
# Format version of the bugdown rendering; stored along with rendered
# messages so that we can efficiently determine what needs to be re-rendered
version = 1
_T = TypeVar('_T')
ElementStringNone = Union[Element, Optional[str]]
AVATAR_REGEX = r'!avatar\((?P<email>[^)]*)\)'
GRAVATAR_REGEX = r'!gravatar\((?P<email>[^)]*)\)'
EMOJI_REGEX = r'(?P<syntax>:[\w\-\+]+:)'
STREAM_LINK_REGEX = r"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
\#\*\* # and after hash sign followed by double asterisks
(?P<stream_name>[^\*]+) # stream name can contain anything
\*\* # ends by double asterisks
"""
bugdown_logger = logging.getLogger()
def rewrite_local_links_to_relative(link: str) -> str:
""" If the link points to a local destination we can just switch to that
instead of opening a new tab. """
if arguments.db_data:
realm_uri_prefix = arguments.db_data['realm_uri'] + "/"
if link.startswith(realm_uri_prefix):
# +1 to skip the `/` before the hash link.
return link[len(realm_uri_prefix):]
return link
def url_embed_preview_enabled_for_realm(message: Optional[Message]) -> bool:
if message is not None:
realm = message.get_realm() # type: Optional[Realm]
else:
realm = None
if not settings.INLINE_URL_EMBED_PREVIEW:
return False
if realm is None:
return True
return realm.inline_url_embed_preview
def image_preview_enabled_for_realm() -> bool:
if arguments.current_message is not None:
realm = arguments.current_message.get_realm() # type: Optional[Realm]
else:
realm = None
if not settings.INLINE_IMAGE_PREVIEW:
return False
if realm is None:
return True
return realm.inline_image_preview
def list_of_tlds() -> List[str]:
# HACK we manually blacklist a few domains
blacklist = ['PY\n', "MD\n"]
# tlds-alpha-by-domain.txt comes from http://data.iana.org/TLD/tlds-alpha-by-domain.txt
tlds_file = os.path.join(os.path.dirname(__file__), 'tlds-alpha-by-domain.txt')
tlds = [tld.lower().strip() for tld in open(tlds_file, 'r')
if tld not in blacklist and not tld[0].startswith('#')]
tlds.sort(key=len, reverse=True)
return tlds
def walk_tree(root: Element,
processor: Callable[[Element], Optional[_T]],
stop_after_first: bool=False) -> List[_T]:
results = []
queue = deque([root])
while queue:
currElement = queue.popleft()
for child in currElement.getchildren():
if child.getchildren():
queue.append(child)
result = processor(child)
if result is not None:
results.append(result)
if stop_after_first:
return results
return results
ElementFamily = NamedTuple('ElementFamily', [
('grandparent', Optional[Element]),
('parent', Element),
('child', Element)
])
ResultWithFamily = NamedTuple('ResultWithFamily', [
('family', ElementFamily),
('result', Any)
])
ElementPair = NamedTuple('ElementPair', [
('parent', Optional[Element]),
('value', Element)
])
def walk_tree_with_family(root: Element,
processor: Callable[[Element], Optional[_T]]
) -> List[ResultWithFamily]:
results = []
queue = deque([ElementPair(parent=None, value=root)])
while queue:
currElementPair = queue.popleft()
for child in currElementPair.value.getchildren():
if child.getchildren():
queue.append(ElementPair(parent=currElementPair, value=child)) # type: ignore # Lack of Deque support in typing module for Python 3.4.3
result = processor(child)
if result is not None:
if currElementPair.parent is not None:
grandparent_element = cast(ElementPair, currElementPair.parent)
grandparent = grandparent_element.value
else:
grandparent = None
family = ElementFamily(
grandparent=grandparent,
parent=currElementPair.value,
child=child
)
results.append(ResultWithFamily(
family=family,
result=result
))
return results
# height is not actually used
def add_a(
root: Element,
url: str,
link: str,
title: Optional[str]=None,
desc: Optional[str]=None,
class_attr: str="message_inline_image",
data_id: Optional[str]=None,
insertion_index: Optional[int]=None,
use_thumbnails: Optional[bool]=True
) -> None:
title = title if title is not None else url_filename(link)
title = title if title else ""
desc = desc if desc is not None else ""
if insertion_index is not None:
div = markdown.util.etree.Element("div")
root.insert(insertion_index, div)
else:
div = markdown.util.etree.SubElement(root, "div")
div.set("class", class_attr)
a = markdown.util.etree.SubElement(div, "a")
a.set("href", link)
a.set("target", "_blank")
a.set("title", title)
if data_id is not None:
a.set("data-id", data_id)
img = markdown.util.etree.SubElement(a, "img")
if is_thumbor_enabled() and use_thumbnails:
# See docs/thumbnailing.md for some high-level documentation.
#
# We strip leading '/' from relative URLs here to ensure
# consistency in what gets passed to /thumbnail
url = url.lstrip('/')
img.set("src", "/thumbnail?url={0}&size=thumbnail".format(
urllib.parse.quote(url, safe='')
))
img.set('data-src-fullsize', "/thumbnail?url={0}&size=full".format(
urllib.parse.quote(url, safe='')
))
else:
# TODO: We might want to rename use_thumbnails to
# !already_thumbnailed for clarity.
img.set("src", url)
if class_attr == "message_inline_ref":
summary_div = markdown.util.etree.SubElement(div, "div")
title_div = markdown.util.etree.SubElement(summary_div, "div")
title_div.set("class", "message_inline_image_title")
title_div.text = title
desc_div = markdown.util.etree.SubElement(summary_div, "desc")
desc_div.set("class", "message_inline_image_desc")
def add_embed(root: Element, link: str, extracted_data: Dict[str, Any]) -> None:
container = markdown.util.etree.SubElement(root, "div")
container.set("class", "message_embed")
img_link = extracted_data.get('image')
if img_link:
parsed_img_link = urllib.parse.urlparse(img_link)
# Append domain where relative img_link url is given
if not parsed_img_link.netloc:
parsed_url = urllib.parse.urlparse(link)
domain = '{url.scheme}://{url.netloc}/'.format(url=parsed_url)
img_link = urllib.parse.urljoin(domain, img_link)
img = markdown.util.etree.SubElement(container, "a")
img.set("style", "background-image: url(" + img_link + ")")
img.set("href", link)
img.set("target", "_blank")
img.set("class", "message_embed_image")
data_container = markdown.util.etree.SubElement(container, "div")
data_container.set("class", "data-container")
title = extracted_data.get('title')
if title:
title_elm = markdown.util.etree.SubElement(data_container, "div")
title_elm.set("class", "message_embed_title")
a = markdown.util.etree.SubElement(title_elm, "a")
a.set("href", link)
a.set("target", "_blank")
a.set("title", title)
a.text = title
description = extracted_data.get('description')
if description:
description_elm = markdown.util.etree.SubElement(data_container, "div")
description_elm.set("class", "message_embed_description")
description_elm.text = description
@cache_with_key(lambda tweet_id: tweet_id, cache_name="database", with_statsd_key="tweet_data")
def fetch_tweet_data(tweet_id: str) -> Optional[Dict[str, Any]]:
if settings.TEST_SUITE:
from . import testing_mocks
res = testing_mocks.twitter(tweet_id)
else:
creds = {
'consumer_key': settings.TWITTER_CONSUMER_KEY,
'consumer_secret': settings.TWITTER_CONSUMER_SECRET,
'access_token_key': settings.TWITTER_ACCESS_TOKEN_KEY,
'access_token_secret': settings.TWITTER_ACCESS_TOKEN_SECRET,
}
if not all(creds.values()):
return None
# We lazily import twitter here because its import process is
# surprisingly slow, and doing so has a significant impact on
# the startup performance of `manage.py` commands.
import twitter
try:
api = twitter.Api(tweet_mode='extended', **creds)
# Sometimes Twitter hangs on responses. Timing out here
# will cause the Tweet to go through as-is with no inline
# preview, rather than having the message be rejected
# entirely. This timeout needs to be less than our overall
# formatting timeout.
tweet = timeout(3, api.GetStatus, tweet_id)
res = tweet.AsDict()
except AttributeError:
bugdown_logger.error('Unable to load twitter api, you may have the wrong '
'library installed, see https://github.com/zulip/zulip/issues/86')
return None
except TimeoutExpired:
# We'd like to try again later and not cache the bad result,
# so we need to re-raise the exception (just as though
# we were being rate-limited)
raise
except twitter.TwitterError as e:
t = e.args[0]
if len(t) == 1 and ('code' in t[0]) and (t[0]['code'] == 34):
# Code 34 means that the message doesn't exist; return
# None so that we will cache the error
return None
elif len(t) == 1 and ('code' in t[0]) and (t[0]['code'] == 88 or
t[0]['code'] == 130):
# Code 88 means that we were rate-limited and 130
# means Twitter is having capacity issues; either way
# just raise the error so we don't cache None and will
# try again later.
raise
else:
# It's not clear what to do in cases of other errors,
# but for now it seems reasonable to log at error
# level (so that we get notified), but then cache the
# failure to proceed with our usual work
bugdown_logger.error(traceback.format_exc())
return None
return res
HEAD_START_RE = re.compile('^head[ >]')
HEAD_END_RE = re.compile('^/head[ >]')
META_START_RE = re.compile('^meta[ >]')
META_END_RE = re.compile('^/meta[ >]')
def fetch_open_graph_image(url: str) -> Optional[Dict[str, Any]]:
in_head = False
# HTML will auto close meta tags, when we start the next tag add
# a closing tag if it has not been closed yet.
last_closed = True
head = []
# TODO: What if response content is huge? Should we get headers first?
try:
content = requests.get(url, timeout=1).text
except Exception:
return None
# Extract the head and meta tags
# All meta tags are self closing, have no children or are closed
# automatically.
for part in content.split('<'):
if not in_head and HEAD_START_RE.match(part):
# Started the head node output it to have a document root
in_head = True
head.append('<head>')
elif in_head and HEAD_END_RE.match(part):
# Found the end of the head close any remaining tag then stop
# processing
in_head = False
if not last_closed:
last_closed = True
head.append('</meta>')
head.append('</head>')
break
elif in_head and META_START_RE.match(part):
# Found a meta node copy it
if not last_closed:
head.append('</meta>')
last_closed = True
head.append('<')
head.append(part)
if '/>' not in part:
last_closed = False
elif in_head and META_END_RE.match(part):
# End of a meta node just copy it to close the tag
head.append('<')
head.append(part)
last_closed = True
try:
doc = etree.fromstring(''.join(head))
except etree.ParseError:
return None
og_image = doc.find('meta[@property="og:image"]')
og_title = doc.find('meta[@property="og:title"]')
og_desc = doc.find('meta[@property="og:description"]')
title = None
desc = None
if og_image is not None:
image = og_image.get('content')
else:
return None
if og_title is not None:
title = og_title.get('content')
if og_desc is not None:
desc = og_desc.get('content')
return {'image': image, 'title': title, 'desc': desc}
def get_tweet_id(url: str) -> Optional[str]:
parsed_url = urllib.parse.urlparse(url)
if not (parsed_url.netloc == 'twitter.com' or parsed_url.netloc.endswith('.twitter.com')):
return None
to_match = parsed_url.path
# In old-style twitter.com/#!/wdaher/status/1231241234-style URLs,
# we need to look at the fragment instead
if parsed_url.path == '/' and len(parsed_url.fragment) > 5:
to_match = parsed_url.fragment
tweet_id_match = re.match(r'^!?/.*?/status(es)?/(?P<tweetid>\d{10,30})(/photo/[0-9])?/?$', to_match)
if not tweet_id_match:
return None
return tweet_id_match.group("tweetid")
class InlineHttpsProcessor(markdown.treeprocessors.Treeprocessor):
def run(self, root: Element) -> None:
# Get all URLs from the blob
found_imgs = walk_tree(root, lambda e: e if e.tag == "img" else None)
for img in found_imgs:
url = img.get("src")
if not url.startswith("http://"):
# Don't rewrite images on our own site (e.g. emoji).
continue
img.set("src", get_camo_url(url))
class BacktickPattern(markdown.inlinepatterns.Pattern):
""" Return a `<code>` element containing the matching text. """
def __init__(self, pattern: str) -> None:
markdown.inlinepatterns.Pattern.__init__(self, pattern)
self.ESCAPED_BSLASH = '%s%s%s' % (markdown.util.STX, ord('\\'), markdown.util.ETX)
self.tag = 'code'
def handleMatch(self, m: Match[str]) -> Union[str, Element]:
if m.group(4):
el = markdown.util.etree.Element(self.tag)
# Modified to not strip whitespace
el.text = markdown.util.AtomicString(m.group(4))
return el
else:
return m.group(2).replace('\\\\', self.ESCAPED_BSLASH)
class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
TWITTER_MAX_IMAGE_HEIGHT = 400
TWITTER_MAX_TO_PREVIEW = 3
INLINE_PREVIEW_LIMIT_PER_MESSAGE = 5
def __init__(self, md: markdown.Markdown, bugdown: 'Bugdown') -> None:
# Passing in bugdown for access to config to check if realm is zulip.com
self.bugdown = bugdown
markdown.treeprocessors.Treeprocessor.__init__(self, md)
def get_actual_image_url(self, url: str) -> str:
# Add specific per-site cases to convert image-preview urls to image urls.
# See https://github.com/zulip/zulip/issues/4658 for more information
parsed_url = urllib.parse.urlparse(url)
if (parsed_url.netloc == 'github.com' or parsed_url.netloc.endswith('.github.com')):
# https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png ->
# https://raw.githubusercontent.com/zulip/zulip/master/static/images/logo/zulip-icon-128x128.png
split_path = parsed_url.path.split('/')
if len(split_path) > 3 and split_path[3] == "blob":
return urllib.parse.urljoin('https://raw.githubusercontent.com',
'/'.join(split_path[0:3] + split_path[4:]))
return url
def is_image(self, url: str) -> bool:
if not image_preview_enabled_for_realm():
return False
parsed_url = urllib.parse.urlparse(url)
# List from http://support.google.com/chromeos/bin/answer.py?hl=en&answer=183093
for ext in [".bmp", ".gif", ".jpg", "jpeg", ".png", ".webp"]:
if parsed_url.path.lower().endswith(ext):
return True
return False
def dropbox_image(self, url: str) -> Optional[Dict[str, Any]]:
# TODO: The returned Dict could possibly be a TypedDict in future.
parsed_url = urllib.parse.urlparse(url)
if (parsed_url.netloc == 'dropbox.com' or parsed_url.netloc.endswith('.dropbox.com')):
is_album = parsed_url.path.startswith('/sc/') or parsed_url.path.startswith('/photos/')
# Only allow preview Dropbox shared links
if not (parsed_url.path.startswith('/s/') or
parsed_url.path.startswith('/sh/') or
is_album):
return None
# Try to retrieve open graph protocol info for a preview
# This might be redundant right now for shared links for images.
# However, we might want to make use of title and description
# in the future. If the actual image is too big, we might also
# want to use the open graph image.
image_info = fetch_open_graph_image(url)
is_image = is_album or self.is_image(url)
# If it is from an album or not an actual image file,
# just use open graph image.
if is_album or not is_image:
# Failed to follow link to find an image preview so
# use placeholder image and guess filename
if image_info is None:
return None
image_info["is_image"] = is_image
return image_info
# Otherwise, try to retrieve the actual image.
# This is because open graph image from Dropbox may have padding
# and gifs do not work.
# TODO: What if image is huge? Should we get headers first?
if image_info is None:
image_info = dict()
image_info['is_image'] = True
parsed_url_list = list(parsed_url)
parsed_url_list[4] = "dl=1" # Replaces query
image_info["image"] = urllib.parse.urlunparse(parsed_url_list)
return image_info
return None
def youtube_id(self, url: str) -> Optional[str]:
if not image_preview_enabled_for_realm():
return None
# Youtube video id extraction regular expression from http://pastebin.com/KyKAFv1s
# If it matches, match.group(2) is the video id.
youtube_re = r'^((?:https?://)?(?:youtu\.be/|(?:\w+\.)?youtube(?:-nocookie)?\.com/)' + \
r'(?:(?:(?:v|embed)/)|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=)))' + \
r'?([0-9A-Za-z_-]+)(?(1).+)?$'
match = re.match(youtube_re, url)
if match is None:
return None
return match.group(2)
def youtube_image(self, url: str) -> Optional[str]:
yt_id = self.youtube_id(url)
if yt_id is not None:
return "https://i.ytimg.com/vi/%s/default.jpg" % (yt_id,)
return None
def vimeo_id(self, url: str) -> Optional[str]:
if not image_preview_enabled_for_realm():
return None
#(http|https)?:\/\/(www\.)?vimeo.com\/(?:channels\/(?:\w+\/)?|groups\/([^\/]*)\/videos\/|)(\d+)(?:|\/\?)
# If it matches, match.group('id') is the video id.
vimeo_re = r'^((http|https)?:\/\/(www\.)?vimeo.com\/' + \
r'(?:channels\/(?:\w+\/)?|groups\/' + \
r'([^\/]*)\/videos\/|)(\d+)(?:|\/\?))$'
match = re.match(vimeo_re, url)
if match is None:
return None
return match.group(5)
def vimeo_title(self, extracted_data: Dict[str, Any]) -> Optional[str]:
title = extracted_data.get("title")
if title is not None:
return "Vimeo - {}".format(title)
return None
def twitter_text(self, text: str,
urls: List[Dict[str, str]],
user_mentions: List[Dict[str, Any]],
media: List[Dict[str, Any]]) -> Element:
"""
Use data from the twitter API to turn links, mentions and media into A
tags. Also convert unicode emojis to images.
This works by using the urls, user_mentions and media data from
the twitter API and searching for unicode emojis in the text using
`unicode_emoji_regex`.
The first step is finding the locations of the URLs, mentions, media and
emoji in the text. For each match we build a dictionary with type, the start
location, end location, the URL to link to, and the text(codepoint and title
in case of emojis) to be used in the link(image in case of emojis).
Next we sort the matches by start location. And for each we add the
text from the end of the last link to the start of the current link to
        the output. The text needs to be added to the text attribute of the first
        node (the P tag) or the tail of the last link created.
Finally we add any remaining text to the last node.
"""
to_process = [] # type: List[Dict[str, Any]]
# Build dicts for URLs
for url_data in urls:
short_url = url_data["url"]
full_url = url_data["expanded_url"]
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
to_process.append({
'type': 'url',
'start': match.start(),
'end': match.end(),
'url': short_url,
'text': full_url,
})
# Build dicts for mentions
for user_mention in user_mentions:
screen_name = user_mention['screen_name']
mention_string = '@' + screen_name
for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE):
to_process.append({
'type': 'mention',
'start': match.start(),
'end': match.end(),
'url': 'https://twitter.com/' + urllib.parse.quote(screen_name),
'text': mention_string,
})
# Build dicts for media
for media_item in media:
short_url = media_item['url']
expanded_url = media_item['expanded_url']
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
to_process.append({
'type': 'media',
'start': match.start(),
'end': match.end(),
'url': short_url,
'text': expanded_url,
})
# Build dicts for emojis
for match in re.finditer(unicode_emoji_regex, text, re.IGNORECASE):
orig_syntax = match.group('syntax')
codepoint = unicode_emoji_to_codepoint(orig_syntax)
if codepoint in codepoint_to_name:
display_string = ':' + codepoint_to_name[codepoint] + ':'
to_process.append({
'type': 'emoji',
'start': match.start(),
'end': match.end(),
'codepoint': codepoint,
'title': display_string,
})
to_process.sort(key=lambda x: x['start'])
p = current_node = markdown.util.etree.Element('p')
def set_text(text: str) -> None:
"""
Helper to set the text or the tail of the current_node
"""
if current_node == p:
current_node.text = text
else:
current_node.tail = text
current_index = 0
for item in to_process:
            # The text we want to link starts inside already linked text, so skip it
if item['start'] < current_index:
continue
# Add text from the end of last link to the start of the current
# link
set_text(text[current_index:item['start']])
current_index = item['end']
if item['type'] != 'emoji':
current_node = elem = url_to_a(item['url'], item['text'])
else:
current_node = elem = make_emoji(item['codepoint'], item['title'])
p.append(elem)
# Add any unused text
set_text(text[current_index:])
return p
def twitter_link(self, url: str) -> Optional[Element]:
tweet_id = get_tweet_id(url)
if tweet_id is None:
return None
try:
res = fetch_tweet_data(tweet_id)
if res is None:
return None
user = res['user'] # type: Dict[str, Any]
tweet = markdown.util.etree.Element("div")
tweet.set("class", "twitter-tweet")
img_a = markdown.util.etree.SubElement(tweet, 'a')
img_a.set("href", url)
img_a.set("target", "_blank")
profile_img = markdown.util.etree.SubElement(img_a, 'img')
profile_img.set('class', 'twitter-avatar')
# For some reason, for, e.g. tweet 285072525413724161,
# python-twitter does not give us a
# profile_image_url_https, but instead puts that URL in
# profile_image_url. So use _https if available, but fall
# back gracefully.
image_url = user.get('profile_image_url_https', user['profile_image_url'])
profile_img.set('src', image_url)
text = html.unescape(res['full_text'])
urls = res.get('urls', [])
user_mentions = res.get('user_mentions', [])
media = res.get('media', []) # type: List[Dict[str, Any]]
p = self.twitter_text(text, urls, user_mentions, media)
tweet.append(p)
span = markdown.util.etree.SubElement(tweet, 'span')
span.text = "- %s (@%s)" % (user['name'], user['screen_name'])
# Add image previews
for media_item in media:
# Only photos have a preview image
if media_item['type'] != 'photo':
continue
# Find the image size that is smaller than
# TWITTER_MAX_IMAGE_HEIGHT px tall or the smallest
size_name_tuples = list(media_item['sizes'].items())
size_name_tuples.sort(reverse=True,
key=lambda x: x[1]['h'])
for size_name, size in size_name_tuples:
if size['h'] < self.TWITTER_MAX_IMAGE_HEIGHT:
break
media_url = '%s:%s' % (media_item['media_url_https'], size_name)
img_div = markdown.util.etree.SubElement(tweet, 'div')
img_div.set('class', 'twitter-image')
img_a = markdown.util.etree.SubElement(img_div, 'a')
img_a.set('href', media_item['url'])
img_a.set('target', '_blank')
img_a.set('title', media_item['url'])
img = markdown.util.etree.SubElement(img_a, 'img')
img.set('src', media_url)
return tweet
except Exception:
# We put this in its own try-except because it requires external
# connectivity. If Twitter flakes out, we don't want to not-render
# the entire message; we just want to not show the Twitter preview.
bugdown_logger.warning(traceback.format_exc())
return None
def get_url_data(self, e: Element) -> Optional[Tuple[str, str]]:
if e.tag == "a":
if e.text is not None:
return (e.get("href"), e.text)
return (e.get("href"), e.get("href"))
return None
def handle_image_inlining(self, root: Element, found_url: ResultWithFamily) -> None:
grandparent = found_url.family.grandparent
parent = found_url.family.parent
ahref_element = found_url.family.child
(url, text) = found_url.result
actual_url = self.get_actual_image_url(url)
# url != text usually implies a named link, which we opt not to remove
url_eq_text = (url == text)
if parent.tag == 'li':
add_a(parent, self.get_actual_image_url(url), url, title=text)
if not parent.text and not ahref_element.tail and url_eq_text:
parent.remove(ahref_element)
elif parent.tag == 'p':
parent_index = None
for index, uncle in enumerate(grandparent.getchildren()):
if uncle is parent:
parent_index = index
break
if parent_index is not None:
ins_index = self.find_proper_insertion_index(grandparent, parent, parent_index)
add_a(grandparent, actual_url, url, title=text, insertion_index=ins_index)
else:
# We're not inserting after parent, since parent not found.
# Append to end of list of grandparent's children as normal
add_a(grandparent, actual_url, url, title=text)
# If link is alone in a paragraph, delete paragraph containing it
if (len(parent.getchildren()) == 1 and
(not parent.text or parent.text == "\n") and
not ahref_element.tail and
url_eq_text):
grandparent.remove(parent)
else:
# If none of the above criteria match, fall back to old behavior
add_a(root, actual_url, url, title=text)
def find_proper_insertion_index(self, grandparent: Element, parent: Element,
parent_index_in_grandparent: int) -> int:
# If there are several inline images from same paragraph, ensure that
# they are in correct (and not opposite) order by inserting after last
# inline image from paragraph 'parent'
uncles = grandparent.getchildren()
parent_links = [ele.attrib['href'] for ele in parent.iter(tag="a")]
insertion_index = parent_index_in_grandparent
while True:
insertion_index += 1
if insertion_index >= len(uncles):
return insertion_index
uncle = uncles[insertion_index]
inline_image_classes = ['message_inline_image', 'message_inline_ref']
if (
uncle.tag != 'div' or
'class' not in uncle.keys() or
uncle.attrib['class'] not in inline_image_classes
):
return insertion_index
uncle_link = list(uncle.iter(tag="a"))[0].attrib['href']
if uncle_link not in parent_links:
return insertion_index
def is_absolute_url(self, url: str) -> bool:
return bool(urllib.parse.urlparse(url).netloc)
def run(self, root: Element) -> None:
# Get all URLs from the blob
found_urls = walk_tree_with_family(root, self.get_url_data)
if len(found_urls) == 0 or len(found_urls) > self.INLINE_PREVIEW_LIMIT_PER_MESSAGE:
return
rendered_tweet_count = 0
for found_url in found_urls:
(url, text) = found_url.result
if not self.is_absolute_url(url):
if self.is_image(url):
self.handle_image_inlining(root, found_url)
# We don't have a strong use case for doing url preview for relative links.
continue
dropbox_image = self.dropbox_image(url)
if dropbox_image is not None:
class_attr = "message_inline_ref"
is_image = dropbox_image["is_image"]
if is_image:
class_attr = "message_inline_image"
# Not making use of title and description of images
add_a(root, dropbox_image['image'], url,
title=dropbox_image.get('title', ""),
desc=dropbox_image.get('desc', ""),
class_attr=class_attr,
use_thumbnails=False)
continue
if self.is_image(url):
self.handle_image_inlining(root, found_url)
continue
if get_tweet_id(url) is not None:
if rendered_tweet_count >= self.TWITTER_MAX_TO_PREVIEW:
                    # Only render at most TWITTER_MAX_TO_PREVIEW tweets per message
continue
twitter_data = self.twitter_link(url)
if twitter_data is None:
# This link is not actually a tweet known to twitter
continue
rendered_tweet_count += 1
div = markdown.util.etree.SubElement(root, "div")
div.set("class", "inline-preview-twitter")
div.insert(0, twitter_data)
continue
youtube = self.youtube_image(url)
if youtube is not None:
yt_id = self.youtube_id(url)
add_a(root, youtube, url, None, None,
"youtube-video message_inline_image",
yt_id, use_thumbnails=False)
continue
if arguments.db_data and arguments.db_data['sent_by_bot']:
continue
if (arguments.current_message is None
or not url_embed_preview_enabled_for_realm(arguments.current_message)):
continue
try:
extracted_data = link_preview.link_embed_data_from_cache(url)
except NotFoundInCache:
arguments.current_message.links_for_preview.add(url)
continue
if extracted_data:
vm_id = self.vimeo_id(url)
if vm_id is not None:
vimeo_image = extracted_data.get('image')
vimeo_title = self.vimeo_title(extracted_data)
if vimeo_image is not None:
add_a(root, vimeo_image, url, vimeo_title,
None, "vimeo-video message_inline_image", vm_id,
use_thumbnails=False)
if vimeo_title is not None:
found_url.family.child.text = vimeo_title
else:
add_embed(root, url, extracted_data)
class Avatar(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Element]:
img = markdown.util.etree.Element('img')
email_address = match.group('email')
email = email_address.strip().lower()
profile_id = None
if arguments.db_data is not None:
user_dict = arguments.db_data['email_info'].get(email)
if user_dict is not None:
profile_id = user_dict['id']
img.set('class', 'message_body_gravatar')
img.set('src', '/avatar/{0}?s=30'.format(profile_id or email))
img.set('title', email)
img.set('alt', email)
return img
def possible_avatar_emails(content: str) -> Set[str]:
emails = set()
for regex in [AVATAR_REGEX, GRAVATAR_REGEX]:
matches = re.findall(regex, content)
for email in matches:
if email:
emails.add(email)
return emails
path_to_name_to_codepoint = os.path.join(settings.STATIC_ROOT,
"generated", "emoji", "name_to_codepoint.json")
with open(path_to_name_to_codepoint) as name_to_codepoint_file:
name_to_codepoint = ujson.load(name_to_codepoint_file)
path_to_codepoint_to_name = os.path.join(settings.STATIC_ROOT,
"generated", "emoji", "codepoint_to_name.json")
with open(path_to_codepoint_to_name) as codepoint_to_name_file:
codepoint_to_name = ujson.load(codepoint_to_name_file)
# All of our emojis(non ZWJ sequences) belong to one of these unicode blocks:
# \U0001f100-\U0001f1ff - Enclosed Alphanumeric Supplement
# \U0001f200-\U0001f2ff - Enclosed Ideographic Supplement
# \U0001f300-\U0001f5ff - Miscellaneous Symbols and Pictographs
# \U0001f600-\U0001f64f - Emoticons (Emoji)
# \U0001f680-\U0001f6ff - Transport and Map Symbols
# \U0001f900-\U0001f9ff - Supplemental Symbols and Pictographs
# \u2000-\u206f - General Punctuation
# \u2300-\u23ff - Miscellaneous Technical
# \u2400-\u243f - Control Pictures
# \u2440-\u245f - Optical Character Recognition
# \u2460-\u24ff - Enclosed Alphanumerics
# \u2500-\u257f - Box Drawing
# \u2580-\u259f - Block Elements
# \u25a0-\u25ff - Geometric Shapes
# \u2600-\u26ff - Miscellaneous Symbols
# \u2700-\u27bf - Dingbats
# \u2900-\u297f - Supplemental Arrows-B
# \u2b00-\u2bff - Miscellaneous Symbols and Arrows
# \u3000-\u303f - CJK Symbols and Punctuation
# \u3200-\u32ff - Enclosed CJK Letters and Months
unicode_emoji_regex = '(?P<syntax>['\
'\U0001F100-\U0001F64F' \
'\U0001F680-\U0001F6FF' \
'\U0001F900-\U0001F9FF' \
'\u2000-\u206F' \
'\u2300-\u27BF' \
'\u2900-\u297F' \
'\u2B00-\u2BFF' \
'\u3000-\u303F' \
'\u3200-\u32FF' \
'])'
# The equivalent JS regex is \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f]|\ud83d[\ude80-\udeff]|
# \ud83e[\udd00-\uddff]|[\u2000-\u206f]|[\u2300-\u27bf]|[\u2b00-\u2bff]|[\u3000-\u303f]|
# [\u3200-\u32ff]. See below comments for explanation. The JS regex is used by marked.js for
# frontend unicode emoji processing.
# The JS regex \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f] represents U0001f100-\U0001f64f
# The JS regex \ud83d[\ude80-\udeff] represents \U0001f680-\U0001f6ff
# The JS regex \ud83e[\udd00-\uddff] represents \U0001f900-\U0001f9ff
# The JS regex [\u2000-\u206f] represents \u2000-\u206f
# The JS regex [\u2300-\u27bf] represents \u2300-\u27bf
# Similarly other JS regexes can be mapped to the respective unicode blocks.
# For more information, please refer to the following article:
# http://crocodillon.com/blog/parsing-emoji-unicode-in-javascript
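def _example_unicode_emoji_regex() -> None:
    # Illustrative sketch, not part of the original module: the character
    # class above matches a single emoji codepoint and captures it in the
    # 'syntax' group.
    demo_pattern = re.compile(unicode_emoji_regex)
    demo_match = demo_pattern.search('hello \U0001F600 world')
    assert demo_match is not None and demo_match.group('syntax') == '\U0001F600'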
def make_emoji(codepoint: str, display_string: str) -> Element:
# Replace underscore in emoji's title with space
title = display_string[1:-1].replace("_", " ")
span = markdown.util.etree.Element('span')
span.set('class', 'emoji emoji-%s' % (codepoint,))
span.set('title', title)
span.text = display_string
return span
def make_realm_emoji(src: str, display_string: str) -> Element:
elt = markdown.util.etree.Element('img')
elt.set('src', src)
elt.set('class', 'emoji')
elt.set("alt", display_string)
elt.set("title", display_string[1:-1].replace("_", " "))
return elt
def unicode_emoji_to_codepoint(unicode_emoji: str) -> str:
codepoint = hex(ord(unicode_emoji))[2:]
    # Unicode codepoints are a minimum of length 4, padded
    # with zeroes if the length is less than four.
while len(codepoint) < 4:
codepoint = '0' + codepoint
return codepoint
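def _example_unicode_emoji_to_codepoint() -> None:
    # Illustrative sketch, not part of the original module: U+1F44D (thumbs up)
    # becomes the lowercase hex string '1f44d', and a short codepoint such as
    # U+00A9 is zero-padded to four digits.
    assert unicode_emoji_to_codepoint('\U0001F44D') == '1f44d'
    assert unicode_emoji_to_codepoint('\u00a9') == '00a9'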
class EmoticonTranslation(markdown.inlinepatterns.Pattern):
""" Translates emoticons like `:)` into emoji like `:smile:`. """
def handleMatch(self, match: Match[str]) -> Optional[Element]:
        # Only translate emoticons if we have db_data and emoticon translation
        # is enabled for this message.
if arguments.db_data is None or not arguments.db_data['translate_emoticons']:
return None
emoticon = match.group('emoticon')
translated = translate_emoticons(emoticon)
name = translated[1:-1]
return make_emoji(name_to_codepoint[name], translated)
class UnicodeEmoji(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Element]:
orig_syntax = match.group('syntax')
codepoint = unicode_emoji_to_codepoint(orig_syntax)
if codepoint in codepoint_to_name:
display_string = ':' + codepoint_to_name[codepoint] + ':'
return make_emoji(codepoint, display_string)
else:
return None
class Emoji(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Element]:
orig_syntax = match.group("syntax")
name = orig_syntax[1:-1]
active_realm_emoji = {} # type: Dict[str, Dict[str, str]]
if arguments.db_data is not None:
active_realm_emoji = arguments.db_data['active_realm_emoji']
if arguments.current_message and name in active_realm_emoji:
return make_realm_emoji(active_realm_emoji[name]['source_url'], orig_syntax)
elif name == 'zulip':
return make_realm_emoji('/static/generated/emoji/images/emoji/unicode/zulip.png', orig_syntax)
elif name in name_to_codepoint:
return make_emoji(name_to_codepoint[name], orig_syntax)
else:
return None
def content_has_emoji_syntax(content: str) -> bool:
return re.search(EMOJI_REGEX, content) is not None
class ModalLink(markdown.inlinepatterns.Pattern):
"""
A pattern that allows including in-app modal links in messages.
"""
def handleMatch(self, match: Match[str]) -> Element:
relative_url = match.group('relative_url')
text = match.group('text')
a_tag = markdown.util.etree.Element("a")
a_tag.set("href", relative_url)
a_tag.set("title", relative_url)
a_tag.text = text
return a_tag
class Tex(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Element:
rendered = render_tex(match.group('body'), is_inline=True)
if rendered is not None:
return etree.fromstring(rendered.encode('utf-8'))
else: # Something went wrong while rendering
span = markdown.util.etree.Element('span')
span.set('class', 'tex-error')
span.text = '$$' + match.group('body') + '$$'
return span
upload_title_re = re.compile("^(https?://[^/]*)?(/user_uploads/\\d+)(/[^/]*)?/[^/]*/(?P<filename>[^/]*)$")
def url_filename(url: str) -> str:
"""Extract the filename if a URL is an uploaded file, or return the original URL"""
match = upload_title_re.match(url)
if match:
return match.group('filename')
else:
return url
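def _example_url_filename() -> None:
    # Illustrative sketch, not part of the original module; the upload path
    # below is made up. For a /user_uploads/ URL the trailing filename is
    # extracted, while any other URL is returned unchanged.
    uploaded = 'https://example.com/user_uploads/1/ab/cd/report.pdf'
    assert url_filename(uploaded) == 'report.pdf'
    assert url_filename('https://example.com/about') == 'https://example.com/about'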
def fixup_link(link: markdown.util.etree.Element, target_blank: bool=True) -> None:
"""Set certain attributes we want on every link."""
if target_blank:
link.set('target', '_blank')
link.set('title', url_filename(link.get('href')))
def sanitize_url(url: str) -> Optional[str]:
"""
Sanitize a url against xss attacks.
See the docstring on markdown.inlinepatterns.LinkPattern.sanitize_url.
"""
try:
parts = urllib.parse.urlparse(url.replace(' ', '%20'))
scheme, netloc, path, params, query, fragment = parts
except ValueError:
# Bad url - so bad it couldn't be parsed.
return ''
# If there is no scheme or netloc and there is a '@' in the path,
# treat it as a mailto: and set the appropriate scheme
if scheme == '' and netloc == '' and '@' in path:
scheme = 'mailto'
elif scheme == '' and netloc == '' and len(path) > 0 and path[0] == '/':
# Allow domain-relative links
return urllib.parse.urlunparse(('', '', path, params, query, fragment))
elif (scheme, netloc, path, params, query) == ('', '', '', '', '') and len(fragment) > 0:
# Allow fragment links
return urllib.parse.urlunparse(('', '', '', '', '', fragment))
# Zulip modification: If scheme is not specified, assume http://
# We re-enter sanitize_url because netloc etc. need to be re-parsed.
if not scheme:
return sanitize_url('http://' + url)
locless_schemes = ['mailto', 'news', 'file', 'bitcoin']
if netloc == '' and scheme not in locless_schemes:
# This fails regardless of anything else.
# Return immediately to save additional processing
return None
# Upstream code will accept a URL like javascript://foo because it
# appears to have a netloc. Additionally there are plenty of other
# schemes that do weird things like launch external programs. To be
# on the safe side, we whitelist the scheme.
if scheme not in ('http', 'https', 'ftp', 'mailto', 'file', 'bitcoin'):
return None
# Upstream code scans path, parameters, and query for colon characters
# because
#
# some aliases [for javascript:] will appear to urllib.parse to have
# no scheme. On top of that relative links (i.e.: "foo/bar.html")
# have no scheme.
#
# We already converted an empty scheme to http:// above, so we skip
# the colon check, which would also forbid a lot of legitimate URLs.
# Url passes all tests. Return url as-is.
return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
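def _example_sanitize_url() -> None:
    # Illustrative sketch, not part of the original module: scheme-less URLs
    # get http:// prepended, dangerous schemes are rejected (None), and bare
    # email-like strings are turned into mailto: links.
    assert sanitize_url('example.com/help') == 'http://example.com/help'
    assert sanitize_url('javascript:alert(1)') is None
    assert sanitize_url('user@example.com') == 'mailto:user@example.com'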
def url_to_a(url: str, text: Optional[str]=None) -> Union[Element, str]:
a = markdown.util.etree.Element('a')
href = sanitize_url(url)
target_blank = True
if href is None:
# Rejected by sanitize_url; render it as plain text.
return url
if text is None:
text = markdown.util.AtomicString(url)
href = rewrite_local_links_to_relative(href)
target_blank = not href.startswith("#narrow") and not href.startswith('mailto:')
a.set('href', href)
a.text = text
fixup_link(a, target_blank)
return a
class VerbosePattern(markdown.inlinepatterns.Pattern):
def __init__(self, pattern: str) -> None:
markdown.inlinepatterns.Pattern.__init__(self, ' ')
# HACK: we just had python-markdown compile an empty regex.
# Now replace with the real regex compiled with the flags we want.
self.pattern = pattern
self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern,
re.DOTALL | re.UNICODE | re.VERBOSE)
class AutoLink(VerbosePattern):
def handleMatch(self, match: Match[str]) -> ElementStringNone:
url = match.group('url')
return url_to_a(url)
class UListProcessor(markdown.blockprocessors.UListProcessor):
""" Process unordered list blocks.
Based on markdown.blockprocessors.UListProcessor, but does not accept
'+' or '-' as a bullet character."""
TAG = 'ul'
RE = re.compile('^[ ]{0,3}[*][ ]+(.*)')
def __init__(self, parser: Any) -> None:
# HACK: Set the tab length to 2 just for the initialization of
# this class, so that bulleted lists (and only bulleted lists)
# work off 2-space indentation.
parser.markdown.tab_length = 2
super().__init__(parser)
parser.markdown.tab_length = 4
class ListIndentProcessor(markdown.blockprocessors.ListIndentProcessor):
""" Process unordered list blocks.
Based on markdown.blockprocessors.ListIndentProcessor, but with 2-space indent
"""
def __init__(self, parser: Any) -> None:
# HACK: Set the tab length to 2 just for the initialization of
# this class, so that bulleted lists (and only bulleted lists)
# work off 2-space indentation.
parser.markdown.tab_length = 2
super().__init__(parser)
parser.markdown.tab_length = 4
class BugdownUListPreprocessor(markdown.preprocessors.Preprocessor):
""" Allows unordered list blocks that come directly after a
paragraph to be rendered as an unordered list
Detects paragraphs that have a matching list item that comes
directly after a line of text, and inserts a newline between
to satisfy Markdown"""
LI_RE = re.compile('^[ ]{0,3}[*][ ]+(.*)', re.MULTILINE)
HANGING_ULIST_RE = re.compile('^.+\\n([ ]{0,3}[*][ ]+.*)', re.MULTILINE)
def run(self, lines: List[str]) -> List[str]:
""" Insert a newline between a paragraph and ulist if missing """
inserts = 0
fence = None
copy = lines[:]
for i in range(len(lines) - 1):
# Ignore anything that is inside a fenced code block
m = FENCE_RE.match(lines[i])
if not fence and m:
fence = m.group('fence')
elif fence and m and fence == m.group('fence'):
fence = None
# If we're not in a fenced block and we detect an upcoming list
# hanging off a paragraph, add a newline
if (not fence and lines[i] and
self.LI_RE.match(lines[i+1]) and
not self.LI_RE.match(lines[i])):
copy.insert(i+inserts+1, '')
inserts += 1
return copy
class AutoNumberOListPreprocessor(markdown.preprocessors.Preprocessor):
""" Finds a sequence of lines numbered by the same number"""
RE = re.compile(r'^([ ]*)(\d+)\.[ ]+(.*)')
TAB_LENGTH = 2
def run(self, lines: List[str]) -> List[str]:
new_lines = [] # type: List[str]
current_list = [] # type: List[Match[str]]
current_indent = 0
for line in lines:
m = self.RE.match(line)
            # Remember if this line is a continuation of an already started list
is_next_item = (m and current_list
and current_indent == len(m.group(1)) // self.TAB_LENGTH)
if not is_next_item:
                # There are no more items in the list we were processing
new_lines.extend(self.renumber(current_list))
current_list = []
if not m:
# Ordinary line
new_lines.append(line)
elif is_next_item:
# Another list item
current_list.append(m)
else:
# First list item
current_list = [m]
current_indent = len(m.group(1)) // self.TAB_LENGTH
new_lines.extend(self.renumber(current_list))
return new_lines
def renumber(self, mlist: List[Match[str]]) -> List[str]:
if not mlist:
return []
start_number = int(mlist[0].group(2))
# Change numbers only if every one is the same
change_numbers = True
for m in mlist:
if int(m.group(2)) != start_number:
change_numbers = False
break
lines = [] # type: List[str]
counter = start_number
for m in mlist:
number = str(counter) if change_numbers else m.group(2)
lines.append('%s%s. %s' % (m.group(1), number, m.group(3)))
counter += 1
return lines
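def _example_auto_number_olist() -> None:
    # Illustrative sketch, not part of the original module (and assuming the
    # stock Preprocessor constructor accepts a None markdown instance): a run
    # of items all numbered "1." is renumbered sequentially, while a list with
    # mixed numbers is left untouched.
    preprocessor = AutoNumberOListPreprocessor(None)
    assert preprocessor.run(['1. foo', '1. bar', '1. baz']) == ['1. foo', '2. bar', '3. baz']
    assert preprocessor.run(['1. foo', '3. bar']) == ['1. foo', '3. bar']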
# Based on markdown.inlinepatterns.LinkPattern
class LinkPattern(markdown.inlinepatterns.Pattern):
""" Return a link element from the given match. """
def handleMatch(self, m: Match[str]) -> Optional[Element]:
href = m.group(9)
if not href:
return None
if href[0] == "<":
href = href[1:-1]
href = sanitize_url(self.unescape(href.strip()))
if href is None:
return None
href = rewrite_local_links_to_relative(href)
el = markdown.util.etree.Element('a')
el.text = m.group(2)
el.set('href', href)
fixup_link(el, target_blank=(href[:1] != '#'))
return el
def prepare_realm_pattern(source: str) -> str:
""" Augment a realm filter so it only matches after start-of-string,
whitespace, or opening delimiters, won't match if there are word
characters directly after, and saves what was matched as "name". """
return r"""(?<![^\s'"\(,:<])(?P<name>""" + source + r')(?!\w)'
# Given a regular expression pattern, linkifies groups that match it
# using the provided format string to construct the URL.
class RealmFilterPattern(markdown.inlinepatterns.Pattern):
""" Applied a given realm filter to the input """
def __init__(self, source_pattern: str,
format_string: str,
markdown_instance: Optional[markdown.Markdown]=None) -> None:
self.pattern = prepare_realm_pattern(source_pattern)
self.format_string = format_string
markdown.inlinepatterns.Pattern.__init__(self, self.pattern, markdown_instance)
def handleMatch(self, m: Match[str]) -> Union[Element, str]:
return url_to_a(self.format_string % m.groupdict(),
m.group("name"))
class UserMentionPattern(markdown.inlinepatterns.Pattern):
def handleMatch(self, m: Match[str]) -> Optional[Element]:
match = m.group(2)
if arguments.current_message and arguments.db_data is not None:
if match.startswith("**") and match.endswith("**"):
name = match[2:-2]
else:
return None
wildcard = mention.user_mention_matches_wildcard(name)
user = arguments.db_data['mention_data'].get_user(name)
if wildcard:
arguments.current_message.mentions_wildcard = True
user_id = "*"
elif user:
arguments.current_message.mentions_user_ids.add(user['id'])
name = user['full_name']
user_id = str(user['id'])
else:
# Don't highlight @mentions that don't refer to a valid user
return None
el = markdown.util.etree.Element("span")
el.set('class', 'user-mention')
el.set('data-user-id', user_id)
el.text = "@%s" % (name,)
return el
return None
class UserGroupMentionPattern(markdown.inlinepatterns.Pattern):
def handleMatch(self, m: Match[str]) -> Optional[Element]:
match = m.group(2)
if arguments.current_message and arguments.db_data is not None:
name = extract_user_group(match)
user_group = arguments.db_data['mention_data'].get_user_group(name)
if user_group:
arguments.current_message.mentions_user_group_ids.add(user_group.id)
name = user_group.name
user_group_id = str(user_group.id)
else:
# Don't highlight @-mentions that don't refer to a valid user
# group.
return None
el = markdown.util.etree.Element("span")
el.set('class', 'user-group-mention')
el.set('data-user-group-id', user_group_id)
el.text = "@%s" % (name,)
return el
return None
class StreamPattern(VerbosePattern):
def find_stream_by_name(self, name: Match[str]) -> Optional[Dict[str, Any]]:
if arguments.db_data is None:
return None
stream = arguments.db_data['stream_names'].get(name)
return stream
def handleMatch(self, m: Match[str]) -> Optional[Element]:
name = m.group('stream_name')
if arguments.current_message:
stream = self.find_stream_by_name(name)
if stream is None:
return None
el = markdown.util.etree.Element('a')
el.set('class', 'stream')
el.set('data-stream-id', str(stream['id']))
# TODO: We should quite possibly not be specifying the
# href here and instead having the browser auto-add the
# href when it processes a message with one of these, to
# provide more clarity to API clients.
stream_url = encode_stream(stream['id'], name)
el.set('href', '/#narrow/stream/{stream_url}'.format(stream_url=stream_url))
el.text = '#{stream_name}'.format(stream_name=name)
return el
return None
def possible_linked_stream_names(content: str) -> Set[str]:
matches = re.findall(STREAM_LINK_REGEX, content, re.VERBOSE)
return set(matches)
class AlertWordsNotificationProcessor(markdown.preprocessors.Preprocessor):
def run(self, lines: Iterable[str]) -> Iterable[str]:
if arguments.current_message and arguments.db_data is not None:
# We check for alert words here, the set of which are
# dependent on which users may see this message.
#
# Our caller passes in the list of possible_words. We
            # don't do any special rendering; we just add the alert words
            # we find to the set arguments.current_message.alert_words.
realm_words = arguments.db_data['possible_words']
content = '\n'.join(lines).lower()
allowed_before_punctuation = "|".join([r'\s', '^', r'[\(\".,\';\[\*`>]'])
allowed_after_punctuation = "|".join([r'\s', '$', r'[\)\"\?:.,\';\]!\*`]'])
for word in realm_words:
escaped = re.escape(word.lower())
match_re = re.compile('(?:%s)%s(?:%s)' %
(allowed_before_punctuation,
escaped,
allowed_after_punctuation))
if re.search(match_re, content):
arguments.current_message.alert_words.add(word)
return lines
# This prevents realm_filters from running on the content of a
# Markdown link, breaking up the link. This is a monkey-patch, but it
# might be worth sending a version of this change upstream.
class AtomicLinkPattern(LinkPattern):
def handleMatch(self, m: Match[str]) -> Optional[Element]:
ret = LinkPattern.handleMatch(self, m)
if ret is None:
return None
if not isinstance(ret, str):
ret.text = markdown.util.AtomicString(ret.text)
return ret
# These are used as keys ("realm_filters_keys") to md_engines and the respective
# realm filter caches
DEFAULT_BUGDOWN_KEY = -1
ZEPHYR_MIRROR_BUGDOWN_KEY = -2
class Bugdown(markdown.Extension):
def __init__(self, *args: Any, **kwargs: Union[bool, int, List[Any]]) -> None:
# define default configs
self.config = {
"realm_filters": [kwargs['realm_filters'],
"Realm-specific filters for realm_filters_key %s" % (kwargs['realm'],)],
"realm": [kwargs['realm'], "Realm id"],
"code_block_processor_disabled": [kwargs['code_block_processor_disabled'],
"Disabled for email gateway"]
}
super().__init__(*args, **kwargs)
def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None:
del md.preprocessors['reference']
if self.getConfig('code_block_processor_disabled'):
del md.parser.blockprocessors['code']
for k in ('image_link', 'image_reference', 'automail',
'autolink', 'link', 'reference', 'short_reference',
'escape', 'strong_em', 'emphasis', 'emphasis2',
'linebreak', 'strong', 'backtick'):
del md.inlinePatterns[k]
try:
# linebreak2 was removed upstream in version 3.2.1, so
# don't throw an error if it is not there
del md.inlinePatterns['linebreak2']
except Exception:
pass
md.preprocessors.add("custom_text_notifications", AlertWordsNotificationProcessor(md), "_end")
# Inline code block without whitespace stripping
md.inlinePatterns.add(
"backtick",
BacktickPattern(r'(?:(?<!\\)((?:\\{2})+)(?=`+)|(?<!\\)(`+)(.+?)(?<!`)\3(?!`))'),
"_begin")
md.inlinePatterns.add(
'strong_em',
markdown.inlinepatterns.DoubleTagPattern(
r'(\*\*\*)(?!\s+)([^\*^\n]+)(?<!\s)\*\*\*', 'strong,em'),
'>backtick')
# Custom bold syntax: **foo** but not __foo__
md.inlinePatterns.add('strong',
markdown.inlinepatterns.SimpleTagPattern(r'(\*\*)([^\n]+?)\2', 'strong'),
'>not_strong')
# Custom strikethrough syntax: ~~foo~~
md.inlinePatterns.add('del',
markdown.inlinepatterns.SimpleTagPattern(
r'(?<!~)(\~\~)([^~\n]+?)(\~\~)(?!~)', 'del'), '>strong')
        # The text between the asterisks must not start or end with whitespace;
        # this is needed so strings like "const char *x = (char *)y" are not
        # emphasized.
md.inlinePatterns.add(
'emphasis',
markdown.inlinepatterns.SimpleTagPattern(r'(\*)(?!\s+)([^\*^\n]+)(?<!\s)\*', 'em'),
'>strong')
for k in ('hashheader', 'setextheader', 'olist', 'ulist', 'indent'):
del md.parser.blockprocessors[k]
md.parser.blockprocessors.add('ulist', UListProcessor(md.parser), '>hr')
md.parser.blockprocessors.add('indent', ListIndentProcessor(md.parser), '<ulist')
# Original regex for blockquote is RE = re.compile(r'(^|\n)[ ]{0,3}>[ ]?(.*)')
md.parser.blockprocessors['quote'].RE = re.compile(
r'(^|\n)(?!(?:[ ]{0,3}>\s*(?:$|\n))*(?:$|\n))'
r'[ ]{0,3}>[ ]?(.*)')
# Note that !gravatar syntax should be deprecated long term.
md.inlinePatterns.add('avatar', Avatar(AVATAR_REGEX), '>backtick')
md.inlinePatterns.add('gravatar', Avatar(GRAVATAR_REGEX), '>backtick')
md.inlinePatterns.add(
'modal_link',
ModalLink(r'!modal_link\((?P<relative_url>[^)]*), (?P<text>[^)]*)\)'),
'>avatar')
md.inlinePatterns.add('usermention', UserMentionPattern(mention.find_mentions), '>backtick')
md.inlinePatterns.add('usergroupmention',
UserGroupMentionPattern(mention.user_group_mentions),
'>backtick')
md.inlinePatterns.add('stream', StreamPattern(STREAM_LINK_REGEX), '>backtick')
md.inlinePatterns.add(
'tex',
Tex(r'\B(?<!\$)\$\$(?P<body>[^\n_$](\\\$|[^$\n])*)\$\$(?!\$)\B'),
'>backtick')
md.inlinePatterns.add('emoji', Emoji(EMOJI_REGEX), '<nl')
md.inlinePatterns.add('translate_emoticons', EmoticonTranslation(emoticon_regex), '>emoji')
md.inlinePatterns.add('unicodeemoji', UnicodeEmoji(unicode_emoji_regex), '_end')
md.inlinePatterns.add('link', AtomicLinkPattern(markdown.inlinepatterns.LINK_RE, md), '>avatar')
for (pattern, format_string, id) in self.getConfig("realm_filters"):
md.inlinePatterns.add('realm_filters/%s' % (pattern,),
RealmFilterPattern(pattern, format_string), '>link')
# A link starts at a word boundary, and ends at space, punctuation, or end-of-input.
#
        # We detect a url either by the `https?://` prefix or by building around the TLD.
        # In lieu of having a recursive regex (which python doesn't support) to match
        # arbitrary numbers of nested matching parentheses, we manually build a regexp that
        # can match up to six levels of nesting.
        # The inner_paren_contents chunk matches the innermost non-parenthesis-holding text,
# and the paren_group matches text with, optionally, a matching set of parens
inner_paren_contents = r"[^\s()\"]*"
paren_group = r"""
[^\s()\"]*? # Containing characters that won't end the URL
(?: \( %s \) # and more characters in matched parens
[^\s()\"]*? # followed by more characters
)* # zero-or-more sets of paired parens
"""
nested_paren_chunk = paren_group
for i in range(6):
nested_paren_chunk = nested_paren_chunk % (paren_group,)
nested_paren_chunk = nested_paren_chunk % (inner_paren_contents,)
tlds = '|'.join(list_of_tlds())
link_regex = r"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
# (Double-negative lookbehind to allow start-of-string)
(?P<url> # Main group
(?:(?: # Domain part
https?://[\w.:@-]+? # If it has a protocol, anything goes.
|(?: # Or, if not, be more strict to avoid false-positives
(?:[\w-]+\.)+ # One or more domain components, separated by dots
(?:%s) # TLDs (filled in via format from tlds-alpha-by-domain.txt)
)
)
(?:/ # A path, beginning with /
%s # zero-to-6 sets of paired parens
)?) # Path is optional
| (?:[\w.-]+\@[\w.-]+\.[\w]+) # Email is separate, since it can't have a path
        %s              # File paths start with file:///; enabled by setting ENABLE_FILE_LINKS=True
| (?:bitcoin:[13][a-km-zA-HJ-NP-Z1-9]{25,34}) # Bitcoin address pattern, see https://mokagio.github.io/tech-journal/2014/11/21/regex-bitcoin.html
)
(?= # URL must be followed by (not included in group)
[!:;\?\),\.\'\"\>]* # Optional punctuation characters
(?:\Z|\s) # followed by whitespace or end of string
)
""" % (tlds, nested_paren_chunk,
r"| (?:file://(/[^/ ]*)+/?)" if settings.ENABLE_FILE_LINKS else r"")
md.inlinePatterns.add('autolink', AutoLink(link_regex), '>link')
md.preprocessors.add('hanging_ulists',
BugdownUListPreprocessor(md),
"_begin")
md.preprocessors.add('auto_number_olist',
AutoNumberOListPreprocessor(md),
"_begin")
md.treeprocessors.add("inline_interesting_links", InlineInterestingLinkProcessor(md, self), "_end")
if settings.CAMO_URI:
md.treeprocessors.add("rewrite_to_https", InlineHttpsProcessor(md), "_end")
if self.getConfig("realm") == ZEPHYR_MIRROR_BUGDOWN_KEY:
# Disable almost all inline patterns for zephyr mirror
# users' traffic that is mirrored. Note that
# inline_interesting_links is a treeprocessor and thus is
# not removed
for k in list(md.inlinePatterns.keys()):
if k not in ["autolink"]:
del md.inlinePatterns[k]
for k in list(md.treeprocessors.keys()):
if k not in ["inline_interesting_links", "inline", "rewrite_to_https"]:
del md.treeprocessors[k]
for k in list(md.preprocessors.keys()):
if k not in ["custom_text_notifications"]:
del md.preprocessors[k]
for k in list(md.parser.blockprocessors.keys()):
if k not in ["paragraph"]:
del md.parser.blockprocessors[k]
md_engines = {} # type: Dict[Tuple[int, bool], markdown.Markdown]
realm_filter_data = {} # type: Dict[int, List[Tuple[str, str, int]]]
class EscapeHtml(markdown.Extension):
def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None:
del md.preprocessors['html_block']
del md.inlinePatterns['html']
def make_md_engine(realm_filters_key: int, email_gateway: bool) -> None:
md_engine_key = (realm_filters_key, email_gateway)
if md_engine_key in md_engines:
del md_engines[md_engine_key]
realm_filters = realm_filter_data[realm_filters_key]
md_engines[md_engine_key] = markdown.Markdown(
output_format = 'html',
extensions = [
'markdown.extensions.nl2br',
'markdown.extensions.tables',
codehilite.makeExtension(
linenums=False,
guess_lang=False
),
fenced_code.makeExtension(),
EscapeHtml(),
Bugdown(realm_filters=realm_filters,
realm=realm_filters_key,
code_block_processor_disabled=email_gateway)])
def subject_links(realm_filters_key: int, subject: str) -> List[str]:
matches = [] # type: List[str]
realm_filters = realm_filters_for_realm(realm_filters_key)
for realm_filter in realm_filters:
pattern = prepare_realm_pattern(realm_filter[0])
for m in re.finditer(pattern, subject):
matches += [realm_filter[1] % m.groupdict()]
return matches
def maybe_update_markdown_engines(realm_filters_key: Optional[int], email_gateway: bool) -> None:
# If realm_filters_key is None, load all filters
global realm_filter_data
if realm_filters_key is None:
all_filters = all_realm_filters()
all_filters[DEFAULT_BUGDOWN_KEY] = []
for realm_filters_key, filters in all_filters.items():
realm_filter_data[realm_filters_key] = filters
make_md_engine(realm_filters_key, email_gateway)
# Hack to ensure that getConfig("realm") is right for mirrored Zephyrs
realm_filter_data[ZEPHYR_MIRROR_BUGDOWN_KEY] = []
make_md_engine(ZEPHYR_MIRROR_BUGDOWN_KEY, False)
else:
realm_filters = realm_filters_for_realm(realm_filters_key)
if realm_filters_key not in realm_filter_data or \
realm_filter_data[realm_filters_key] != realm_filters:
# Realm filters data has changed, update `realm_filter_data` and any
# of the existing markdown engines using this set of realm filters.
realm_filter_data[realm_filters_key] = realm_filters
for email_gateway_flag in [True, False]:
if (realm_filters_key, email_gateway_flag) in md_engines:
                    # Update only existing engines (if any); don't create a new one.
make_md_engine(realm_filters_key, email_gateway_flag)
if (realm_filters_key, email_gateway) not in md_engines:
        # Markdown engine corresponding to this key doesn't exist, so create one.
make_md_engine(realm_filters_key, email_gateway)
# We want to log Markdown parser failures, but shouldn't log the actual input
# message for privacy reasons. The compromise is to replace all alphanumeric
# characters with 'x'.
#
# We also use repr() to improve reproducibility, and to escape terminal control
# codes, which can do surprisingly nasty things.
_privacy_re = re.compile('\\w', flags=re.UNICODE)
def privacy_clean_markdown(content: str) -> str:
return repr(_privacy_re.sub('x', content))
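def _example_privacy_clean_markdown() -> None:
    # Illustrative sketch, not part of the original module: word characters
    # (letters, digits, underscores) are masked with 'x', punctuation and
    # whitespace survive, and repr() adds the surrounding quotes.
    assert privacy_clean_markdown('hi @user_1!') == "'xx @xxxxxx!'"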
def log_bugdown_error(msg: str) -> None:
"""We use this unusual logging approach to log the bugdown error, in
    order to prevent AdminNotifyHandler from sending the sanitized
original markdown formatting into another Zulip message, which
could cause an infinite exception loop."""
bugdown_logger.error(msg)
def get_email_info(realm_id: int, emails: Set[str]) -> Dict[str, FullNameInfo]:
if not emails:
return dict()
q_list = {
Q(email__iexact=email.strip().lower())
for email in emails
}
rows = UserProfile.objects.filter(
realm_id=realm_id
).filter(
functools.reduce(lambda a, b: a | b, q_list),
).values(
'id',
'email',
)
dct = {
row['email'].strip().lower(): row
for row in rows
}
return dct
def get_full_name_info(realm_id: int, full_names: Set[str]) -> Dict[str, FullNameInfo]:
if not full_names:
return dict()
q_list = {
Q(full_name__iexact=full_name)
for full_name in full_names
}
rows = UserProfile.objects.filter(
realm_id=realm_id,
is_active=True,
).filter(
functools.reduce(lambda a, b: a | b, q_list),
).values(
'id',
'full_name',
'email',
)
dct = {
row['full_name'].lower(): row
for row in rows
}
return dct
class MentionData:
def __init__(self, realm_id: int, content: str) -> None:
full_names = possible_mentions(content)
self.full_name_info = get_full_name_info(realm_id, full_names)
self.user_id_info = {
row['id']: row
for row in self.full_name_info.values()
}
user_group_names = possible_user_group_mentions(content)
self.user_group_name_info = get_user_group_name_info(realm_id, user_group_names)
group_ids = [group.id for group in self.user_group_name_info.values()]
membership = UserGroupMembership.objects.filter(user_group_id__in=group_ids)
self.user_group_members = defaultdict(list) # type: Dict[int, List[int]]
for info in membership.values('user_group_id', 'user_profile_id'):
group_id = info['user_group_id']
user_profile_id = info['user_profile_id']
self.user_group_members[group_id].append(user_profile_id)
def get_user(self, name: str) -> Optional[FullNameInfo]:
return self.full_name_info.get(name.lower(), None)
def get_user_by_id(self, id: str) -> Optional[FullNameInfo]:
return self.user_id_info.get(int(id), None)
def get_user_ids(self) -> Set[int]:
"""
Returns the user IDs that might have been mentioned by this
content. Note that because this data structure has not parsed
the message and does not know about escaping/code blocks, this
will overestimate the list of user ids.
"""
return set(self.user_id_info.keys())
def get_user_group(self, name: str) -> Optional[UserGroup]:
return self.user_group_name_info.get(name.lower(), None)
def get_group_members(self, user_group_id: int) -> List[int]:
return self.user_group_members.get(user_group_id, [])
def get_user_group_name_info(realm_id: int, user_group_names: Set[str]) -> Dict[str, UserGroup]:
if not user_group_names:
return dict()
rows = UserGroup.objects.filter(realm_id=realm_id,
name__in=user_group_names)
dct = {row.name.lower(): row for row in rows}
return dct
def get_stream_name_info(realm: Realm, stream_names: Set[str]) -> Dict[str, FullNameInfo]:
if not stream_names:
return dict()
q_list = {
Q(name=name)
for name in stream_names
}
rows = get_active_streams(
realm=realm,
).filter(
functools.reduce(lambda a, b: a | b, q_list),
).values(
'id',
'name',
)
dct = {
row['name']: row
for row in rows
}
return dct
def do_convert(content: str,
message: Optional[Message]=None,
message_realm: Optional[Realm]=None,
possible_words: Optional[Set[str]]=None,
sent_by_bot: Optional[bool]=False,
mention_data: Optional[MentionData]=None,
email_gateway: Optional[bool]=False) -> str:
"""Convert Markdown to HTML, with Zulip-specific settings and hacks."""
# This logic is a bit convoluted, but the overall goal is to support a range of use cases:
# * Nothing is passed in other than content -> just run default options (e.g. for docs)
# * message is passed, but no realm is -> look up realm from message
# * message_realm is passed -> use that realm for bugdown purposes
if message is not None:
if message_realm is None:
message_realm = message.get_realm()
if message_realm is None:
realm_filters_key = DEFAULT_BUGDOWN_KEY
else:
realm_filters_key = message_realm.id
if (message is not None and message.sender.realm.is_zephyr_mirror_realm and
message.sending_client.name == "zephyr_mirror"):
# Use slightly customized Markdown processor for content
# delivered via zephyr_mirror
realm_filters_key = ZEPHYR_MIRROR_BUGDOWN_KEY
maybe_update_markdown_engines(realm_filters_key, email_gateway)
md_engine_key = (realm_filters_key, email_gateway)
if md_engine_key in md_engines:
_md_engine = md_engines[md_engine_key]
else:
if DEFAULT_BUGDOWN_KEY not in md_engines:
maybe_update_markdown_engines(realm_filters_key=None, email_gateway=False)
_md_engine = md_engines[(DEFAULT_BUGDOWN_KEY, email_gateway)]
# Reset the parser; otherwise it will get slower over time.
_md_engine.reset()
arguments.current_message = message
# Pre-fetch data from the DB that is used in the bugdown thread
if message is not None:
assert message_realm is not None # ensured above if message is not None
if possible_words is None:
possible_words = set() # Set[str]
# Here we fetch the data structures needed to render
# mentions/avatars/stream mentions from the database, but only
# if there is syntax in the message that might use them, since
# the fetches are somewhat expensive and these types of syntax
# are uncommon enough that it's a useful optimization.
if mention_data is None:
mention_data = MentionData(message_realm.id, content)
emails = possible_avatar_emails(content)
email_info = get_email_info(message_realm.id, emails)
stream_names = possible_linked_stream_names(content)
stream_name_info = get_stream_name_info(message_realm, stream_names)
if content_has_emoji_syntax(content):
active_realm_emoji = message_realm.get_active_emoji()
else:
active_realm_emoji = dict()
arguments.db_data = {
'possible_words': possible_words,
'email_info': email_info,
'mention_data': mention_data,
'active_realm_emoji': active_realm_emoji,
'realm_uri': message_realm.uri,
'sent_by_bot': sent_by_bot,
'stream_names': stream_name_info,
'translate_emoticons': message.sender.translate_emoticons,
}
try:
# Spend at most 5 seconds rendering; this protects the backend
# from being overloaded by bugs (e.g. markdown logic that is
# extremely inefficient in corner cases) as well as user
# errors (e.g. a realm filter that makes some syntax
# infinite-loop).
rendered_content = timeout(5, _md_engine.convert, content)
# Throw an exception if the content is huge; this protects the
# rest of the codebase from any bugs where we end up rendering
# something huge.
if len(rendered_content) > MAX_MESSAGE_LENGTH * 10:
raise BugdownRenderingException('Rendered content exceeds %s characters' %
(MAX_MESSAGE_LENGTH * 10,))
return rendered_content
except Exception:
cleaned = privacy_clean_markdown(content)
# NOTE: Don't change this message without also changing the
# logic in logging_handlers.py or we can create recursive
# exceptions.
exception_message = ('Exception in Markdown parser: %sInput (sanitized) was: %s'
% (traceback.format_exc(), cleaned))
bugdown_logger.exception(exception_message)
raise BugdownRenderingException()
finally:
arguments.current_message = None
arguments.db_data = None
bugdown_time_start = 0.0
bugdown_total_time = 0.0
bugdown_total_requests = 0
def get_bugdown_time() -> float:
return bugdown_total_time
def get_bugdown_requests() -> int:
return bugdown_total_requests
def bugdown_stats_start() -> None:
global bugdown_time_start
bugdown_time_start = time.time()
def bugdown_stats_finish() -> None:
global bugdown_total_time
global bugdown_total_requests
global bugdown_time_start
bugdown_total_requests += 1
bugdown_total_time += (time.time() - bugdown_time_start)
def convert(content: str,
message: Optional[Message]=None,
message_realm: Optional[Realm]=None,
possible_words: Optional[Set[str]]=None,
sent_by_bot: Optional[bool]=False,
mention_data: Optional[MentionData]=None,
email_gateway: Optional[bool]=False) -> str:
bugdown_stats_start()
ret = do_convert(content, message, message_realm,
possible_words, sent_by_bot, mention_data, email_gateway)
bugdown_stats_finish()
return ret
| 41.184783
| 162
| 0.601142
|
713593e0365f57179ba46f50240b9e9705609784
| 1,776
|
py
|
Python
|
TweetNormalizer.py
|
VinAIResearch/BERTweet
|
c595d21749591ca43ddcda66de0facd3a14ec23b
|
[
"MIT"
] | 437
|
2020-05-21T04:42:57.000Z
|
2022-03-23T09:10:47.000Z
|
TweetNormalizer.py
|
VinAIResearch/BERTweet
|
c595d21749591ca43ddcda66de0facd3a14ec23b
|
[
"MIT"
] | 34
|
2020-05-22T07:56:22.000Z
|
2022-03-27T02:42:59.000Z
|
TweetNormalizer.py
|
VinAIResearch/BERTweet
|
c595d21749591ca43ddcda66de0facd3a14ec23b
|
[
"MIT"
] | 48
|
2020-05-21T12:20:45.000Z
|
2022-03-31T20:24:04.000Z
|
from emoji import demojize
from nltk.tokenize import TweetTokenizer
tokenizer = TweetTokenizer()
def normalizeToken(token):
lowercased_token = token.lower()
if token.startswith("@"):
return "@USER"
elif lowercased_token.startswith("http") or lowercased_token.startswith("www"):
return "HTTPURL"
elif len(token) == 1:
return demojize(token)
else:
if token == "’":
return "'"
elif token == "…":
return "..."
else:
return token
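def exampleNormalizeToken():
    # Illustrative sketch, not part of the original script: user handles and
    # URLs are replaced by placeholder tokens, and ordinary words pass through
    # unchanged.
    assert normalizeToken("@someone") == "@USER"
    assert normalizeToken("https://example.com/page") == "HTTPURL"
    assert normalizeToken("hello") == "hello"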
def normalizeTweet(tweet):
tokens = tokenizer.tokenize(tweet.replace("’", "'").replace("…", "..."))
normTweet = " ".join([normalizeToken(token) for token in tokens])
normTweet = (
normTweet.replace("cannot ", "can not ")
.replace("n't ", " n't ")
.replace("n 't ", " n't ")
.replace("ca n't", "can't")
.replace("ai n't", "ain't")
)
normTweet = (
normTweet.replace("'m ", " 'm ")
.replace("'re ", " 're ")
.replace("'s ", " 's ")
.replace("'ll ", " 'll ")
.replace("'d ", " 'd ")
.replace("'ve ", " 've ")
)
normTweet = (
normTweet.replace(" p . m .", " p.m.")
.replace(" p . m ", " p.m ")
.replace(" a . m .", " a.m.")
.replace(" a . m ", " a.m ")
)
return " ".join(normTweet.split())
if __name__ == "__main__":
print(
normalizeTweet(
"SC has first two presumptive cases of coronavirus, DHEC confirms https://postandcourier.com/health/covid19/sc-has-first-two-presumptive-cases-of-coronavirus-dhec-confirms/article_bddfe4ae-5fd3-11ea-9ce4-5f495366cee6.html?utm_medium=social&utm_source=twitter&utm_campaign=user-share… via @postandcourier"
)
)
| 29.6
| 316
| 0.548423
|
dbb5e8844df4afef586cc88635bfe159dc6d3598
| 6,958
|
py
|
Python
|
training-script/register/register_model.py
|
mkoivi-ms/MLOps-healthcare
|
890ea9e20e99fe4f88908e6dd3b645b42768fd6f
|
[
"MIT"
] | null | null | null |
training-script/register/register_model.py
|
mkoivi-ms/MLOps-healthcare
|
890ea9e20e99fe4f88908e6dd3b645b42768fd6f
|
[
"MIT"
] | null | null | null |
training-script/register/register_model.py
|
mkoivi-ms/MLOps-healthcare
|
890ea9e20e99fe4f88908e6dd3b645b42768fd6f
|
[
"MIT"
] | null | null | null |
"""
Copyright (C) Microsoft Corporation. All rights reserved.
Microsoft Corporation (“Microsoft”) grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. This license does not purport to express any claim of ownership over
data you may have shared with Microsoft in the creation of the Software Code.
Unless applicable law gives you more rights, Microsoft reserves all other
rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import json
import os
import sys
import argparse
import traceback
import joblib
from azureml.core import Run, Experiment, Workspace, Dataset
from azureml.core.model import Model as AMLModel
def main():
run = Run.get_context()
if (run.id.startswith('OfflineRun')):
from dotenv import load_dotenv
# For local development, set values in this section
load_dotenv()
workspace_name = os.environ.get("WORKSPACE_NAME")
experiment_name = os.environ.get("EXPERIMENT_NAME")
resource_group = os.environ.get("RESOURCE_GROUP")
subscription_id = os.environ.get("SUBSCRIPTION_ID")
        # run_id is useful for querying previous runs
run_id = "bd184a18-2ac8-4951-8e78-e290bef3b012"
aml_workspace = Workspace.get(
name=workspace_name,
subscription_id=subscription_id,
resource_group=resource_group
)
ws = aml_workspace
exp = Experiment(ws, experiment_name)
else:
ws = run.experiment.workspace
exp = run.experiment
run_id = 'amlcompute'
parser = argparse.ArgumentParser("register")
parser.add_argument(
"--run_id",
type=str,
help="Training run ID",
)
parser.add_argument(
"--model_name",
type=str,
help="Name of the Model",
default="diabetes_model.pkl",
)
parser.add_argument(
"--step_input",
type=str,
help=("input from previous steps")
)
args = parser.parse_args()
if (args.run_id is not None):
run_id = args.run_id
if (run_id == 'amlcompute'):
run_id = run.parent.id
model_name = args.model_name
model_path = args.step_input
print("Getting registration parameters")
# Load the registration parameters from the parameters file
with open("parameters.json") as f:
pars = json.load(f)
try:
register_args = pars["registration"]
except KeyError:
print("Could not load registration values from file")
register_args = {"tags": []}
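    # Illustrative note (an assumption, not taken from the original repo): the
    # parameters.json file is expected to look roughly like
    #     {"registration": {"tags": ["mse", "r2"]}}
    # where each tag names a metric logged on the parent run.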
model_tags = {}
for tag in register_args["tags"]:
try:
mtag = run.parent.get_metrics()[tag]
model_tags[tag] = mtag
except KeyError:
print(f"Could not find {tag} metric on parent run.")
# load the model
print("Loading model from " + model_path)
model_file = os.path.join(model_path, model_name)
model = joblib.load(model_file)
parent_tags = run.parent.get_tags()
try:
build_id = parent_tags["BuildId"]
except KeyError:
build_id = None
print("BuildId tag not found on parent run.")
print(f"Tags present: {parent_tags}")
try:
build_uri = parent_tags["BuildUri"]
except KeyError:
build_uri = None
print("BuildUri tag not found on parent run.")
print(f"Tags present: {parent_tags}")
if (model is not None):
dataset_id = parent_tags["dataset_id"]
if (build_id is None):
register_aml_model(
model_file,
model_name,
model_tags,
exp,
run_id,
dataset_id)
elif (build_uri is None):
register_aml_model(
model_file,
model_name,
model_tags,
exp,
run_id,
dataset_id,
build_id)
else:
register_aml_model(
model_file,
model_name,
model_tags,
exp,
run_id,
dataset_id,
build_id,
build_uri)
else:
print("Model not found. Skipping model registration.")
sys.exit(0)
def model_already_registered(model_name, exp, run_id):
model_list = AMLModel.list(exp.workspace, name=model_name, run_id=run_id)
if len(model_list) >= 1:
e = ("Model name:", model_name, "in workspace",
exp.workspace, "with run_id ", run_id, "is already registered.")
print(e)
raise Exception(e)
else:
print("Model is not registered for this run.")
def register_aml_model(
model_path,
model_name,
model_tags,
exp,
run_id,
dataset_id,
build_id: str = 'none',
build_uri=None
):
try:
tagsValue = {"area": "safe-drive",
"run_id": run_id,
"experiment_name": exp.name}
tagsValue.update(model_tags)
if (build_id != 'none'):
model_already_registered(model_name, exp, run_id)
tagsValue["BuildId"] = build_id
if (build_uri is not None):
tagsValue["BuildUri"] = build_uri
model = AMLModel.register(
workspace=exp.workspace,
model_name=model_name,
model_path=model_path,
tags=tagsValue,
datasets=[('training data',
Dataset.get_by_id(exp.workspace, dataset_id))])
os.chdir("..")
print(
"Model registered: {} \nModel Description: {} "
"\nModel Version: {}".format(
model.name, model.description, model.version
)
)
except Exception:
traceback.print_exc(limit=None, file=None, chain=True)
print("Model registration failed")
raise
if __name__ == '__main__':
main()
| 32.362791
| 79
| 0.620006
|
a55dc64852fa3fade509a24b21b3ff6734c90f84
| 3,282
|
py
|
Python
|
setup.py
|
mikelolasagasti/insights-core
|
68fbeba7294a33dd170ac4d1e73715d5bb68b702
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
mikelolasagasti/insights-core
|
68fbeba7294a33dd170ac4d1e73715d5bb68b702
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
mikelolasagasti/insights-core
|
68fbeba7294a33dd170ac4d1e73715d5bb68b702
|
[
"Apache-2.0"
] | null | null | null |
import os
from setuptools import setup, find_packages
__here__ = os.path.dirname(os.path.abspath(__file__))
package_info = dict.fromkeys(["RELEASE", "COMMIT", "VERSION", "NAME"])
for name in package_info:
with open(os.path.join(__here__, "insights", name)) as f:
package_info[name] = f.read().strip()
entry_points = {
'console_scripts': [
'insights-run = insights:main',
'insights-info = insights.tools.query:main',
'gen_api = insights.tools.generate_api_config:main',
'insights-perf = insights.tools.perf:main',
'client = insights.client:run',
'mangle = insights.util.mangle:main'
]
}
runtime = set([
'pyyaml>=3.10,<=3.13',
'six',
'requests',
'redis',
'cachecontrol',
'cachecontrol[redis]',
'cachecontrol[filecache]',
'lockfile',
])
def maybe_require(pkg):
try:
__import__(pkg)
except ImportError:
runtime.add(pkg)
maybe_require("importlib")
maybe_require("argparse")
client = set([
'requests',
'pyOpenSSL',
])
develop = set([
'futures==3.0.5',
'wheel',
])
docs = set([
'Sphinx==1.7.9',
'nbsphinx==0.3.1',
'sphinx_rtd_theme',
'ipython<6',
'colorama',
'jinja2',
])
testing = set([
'coverage==4.3.4',
'pytest==3.0.6',
'pytest-cov==2.4.0',
'mock==2.0.0',
])
cluster = set([
'ansible',
'pandas',
'jinja2',
'colorama',
])
linting = set([
'flake8==2.6.2',
])
optional = set([
'python-cjson',
'python-logstash',
'python-statsd',
'watchdog',
])
if __name__ == "__main__":
# allows for runtime modification of rpm name
name = os.environ.get("INSIGHTS_CORE_NAME", package_info["NAME"])
setup(
name=name,
version=package_info["VERSION"],
description="Insights Core is a data collection and analysis framework",
long_description=open("README.rst").read(),
url="https://github.com/redhatinsights/insights-core",
author="Red Hat, Inc.",
author_email="insights@redhat.com",
packages=find_packages(),
install_requires=list(runtime),
package_data={'': ['LICENSE']},
license='Apache 2.0',
extras_require={
'develop': list(runtime | develop | client | docs | linting | testing | cluster),
'client': list(runtime | client),
'cluster': list(runtime | cluster),
'optional': list(optional),
'docs': list(docs),
'linting': list(linting | client),
'testing': list(testing | client)
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
entry_points=entry_points,
include_package_data=True
)
| 25.053435
| 93
| 0.574954
|
1a73443de0e4c3b08a0f319411daf0a4652ce6af
| 275
|
py
|
Python
|
paddle2onnx/version.py
|
PaddlePaddle/paddle2onnx
|
9773ccdfc19ddc0cab2fbdd827145adb21d78b51
|
[
"Apache-2.0"
] | 95
|
2019-09-27T14:26:59.000Z
|
2020-12-08T01:20:28.000Z
|
paddle2onnx/version.py
|
PaddlePaddle/paddle-onnx
|
abbbdd606dad8bfcbf30f5d8892f888f6843b069
|
[
"Apache-2.0"
] | 51
|
2018-04-04T22:39:30.000Z
|
2019-08-28T20:19:14.000Z
|
paddle2onnx/version.py
|
PaddlePaddle/paddle2onnx
|
9773ccdfc19ddc0cab2fbdd827145adb21d78b51
|
[
"Apache-2.0"
] | 22
|
2019-09-03T08:50:04.000Z
|
2020-12-02T11:05:42.000Z
|
# This file is generated by setup.py. DO NOT EDIT!
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
version = '0.9.5'
git_version = '9c50d6d3db74bbd7057b291a53d6da004436740e'
| 34.375
| 56
| 0.84
|
0610d7c81e524c3b04e6004f7c4e414522fdb1ca
| 10,705
|
py
|
Python
|
src/python/dart/message/trigger_listener.py
|
RetailMeNotSandbox/dart
|
58a05f56c04fadd6741501262d92aeb143cd2f2e
|
[
"MIT"
] | 18
|
2016-03-03T19:10:21.000Z
|
2021-07-14T22:37:35.000Z
|
src/python/dart/message/trigger_listener.py
|
RetailMeNotSandbox/dart
|
58a05f56c04fadd6741501262d92aeb143cd2f2e
|
[
"MIT"
] | 62
|
2016-04-11T15:17:23.000Z
|
2017-09-08T17:18:53.000Z
|
src/python/dart/message/trigger_listener.py
|
RetailMeNotSandbox/dart
|
58a05f56c04fadd6741501262d92aeb143cd2f2e
|
[
"MIT"
] | 15
|
2016-03-03T15:38:34.000Z
|
2019-03-27T19:33:08.000Z
|
import json
import logging
import traceback
from dart.context.locator import injectable
from dart.message.call import TriggerCall
from dart.model.action import ActionState, OnFailure as ActionOnFailure, Action
from dart.model.datastore import DatastoreState
from dart.model.query import Filter, Operator
from dart.model.workflow import WorkflowInstanceState, WorkflowState, OnFailure as WorkflowOnFailure
from dart.trigger.subscription import subscription_batch_trigger
from dart.trigger.super import super_trigger
_logger = logging.getLogger(__name__)
@injectable
class TriggerListener(object):
def __init__(self, trigger_broker, trigger_proxy, trigger_service, action_service, datastore_service,
workflow_service, emailer, subscription_element_service, subscription_service):
self._trigger_broker = trigger_broker
self._trigger_proxy = trigger_proxy
self._trigger_service = trigger_service
self._action_service = action_service
self._datastore_service = datastore_service
self._workflow_service = workflow_service
self._emailer = emailer
self._subscription_element_service = subscription_element_service
self._subscription_service = subscription_service
self._handlers = {
TriggerCall.PROCESS_TRIGGER: self._handle_process_trigger,
TriggerCall.TRY_NEXT_ACTION: self._handle_try_next_action,
TriggerCall.COMPLETE_ACTION: self._handle_complete_action,
}
self._trigger_processors = {
name: p.evaluate_message for name, p in trigger_service.trigger_processors().iteritems()
}
def await_call(self, wait_time_seconds=20):
self._trigger_broker.receive_message(self._handle_call, wait_time_seconds)
def _handle_call(self, message_id, message, previous_handler_failed):
# CloudWatch Events (scheduled trigger) look like this, and need to be deserialized:
if 'Message' in message and 'MessageId' in message:
message = json.loads(message['Message'])
call = message['call']
if call not in self._handlers:
raise Exception('no handler defined for call: %s' % call)
handler = self._handlers[call]
try:
handler(message_id, message, previous_handler_failed)
except Exception:
_logger.error(json.dumps(traceback.format_exc()))
def _handle_process_trigger(self, message_id, message, previous_handler_failed):
_logger.info("Processing Trigger: message_id={message_id}, message={message}".format(message_id=message_id, message=message))
if previous_handler_failed:
_logger.error('previous handler for message id={message_id} failed... see if retrying is possible. message={message}'.\
format(message_id=message_id, message=message))
return
trigger_type_name = message['trigger_type_name']
if trigger_type_name not in self._trigger_processors:
raise Exception('no handler defined for trigger_type_name: {trigger_name}. message_id={message_id}. message={message}'.\
format(trigger_name=trigger_type_name, message_id=message_id, message=message))
handler = self._trigger_processors[trigger_type_name]
try:
executed_trigger_ids = handler(message['message'], self._trigger_service)
for trigger_id in executed_trigger_ids or []:
self._trigger_proxy.super_trigger_evaluation(trigger_id)
except Exception:
_logger.error(json.dumps(traceback.format_exc()))
def _handle_try_next_action(self, message_id, message, previous_handler_failed):
if previous_handler_failed:
_logger.error('previous handler for message id={message_id} failed... see if retrying is possible. message={message}'.format(message_id=message_id, message=message))
return
_logger.info("Next Action Trigger: message_id={message_id}, message={message}".format(message_id=message_id, message=message))
datastore = self._datastore_service.get_datastore(message['datastore_id'])
running_or_queued_workflow_ids = self._action_service.find_running_or_queued_action_workflow_ids(datastore.id)
exists_non_workflow_action = self._action_service.exists_running_or_queued_non_workflow_action(datastore.id)
next_action = self._action_service.find_next_runnable_action(
datastore_id=datastore.id,
not_in_workflow_ids=running_or_queued_workflow_ids,
ensure_workflow_action=exists_non_workflow_action
)
if not next_action:
err_msg = 'datastore (id={datastore_id}) has no actions that can be run at this time. '
err_msg = err_msg + 'Datastore workflows running/queued = {running_queued}. '
err_msg = err_msg + 'exists_non_workflow_action={exists_non_workflow_action}. message={message}.'
_logger.error(err_msg.format(datastore_id=datastore.id,
running_queued=running_or_queued_workflow_ids,
exists_non_workflow_action=exists_non_workflow_action,
message=message))
return
assert isinstance(next_action, Action)
if next_action.data.action_type_name == 'consume_subscription':
self._subscription_element_service.assign_subscription_elements(next_action)
self._action_service.update_action_state(next_action, ActionState.QUEUED, next_action.data.error_message)
def _handle_complete_action(self, message_id, message, previous_handler_failed):
if previous_handler_failed:
_logger.error('previous handler for message id=%s failed... see if retrying is possible' % message_id)
return
_logger.info("Complete Action Trigger: message_id={message_id}, message={message}".format(message_id=message_id, message=message))
state = message['action_state']
action = self._action_service.get_action(message['action_id'])
assert isinstance(action, Action)
datastore = self._datastore_service.get_datastore(action.data.datastore_id)
error_message = message.get('error_message')
self._action_service.update_action_state(action, state, error_message or action.data.error_message)
wfid = action.data.workflow_id
wfiid = action.data.workflow_instance_id
wf = self._workflow_service.get_workflow(wfid) if wfid else None
wfi = self._workflow_service.get_workflow_instance(wfiid) if wfiid else None
callbacks = []
try_next_action = True
try:
if state == ActionState.FAILED:
callbacks.append(lambda: self._emailer.send_action_failed_email(action, datastore))
if action.data.on_failure in [ActionOnFailure.DEACTIVATE, ActionOnFailure.HALT]:
try_next_action = False
if wf and wfi:
self._workflow_service.update_workflow_instance_state(wfi, WorkflowInstanceState.FAILED)
if wfi.data.retry_num < wf.data.retries_on_failure:
retry_num = wfi.data.retry_num + 1
callbacks.append(lambda: self._trigger_proxy.trigger_workflow_retry(wfid, retry_num))
else:
# if we halt we should not mark the workflow as inactive.
if action.data.on_failure == ActionOnFailure.DEACTIVATE:
self._workflow_service.update_workflow_state(wf, WorkflowState.INACTIVE)
if wf.data.on_failure == WorkflowOnFailure.DEACTIVATE:
self._datastore_service.update_datastore_state(datastore, DatastoreState.INACTIVE)
f1 = Filter('workflow_instance_id', Operator.EQ, wfiid)
f2 = Filter('state', Operator.EQ, ActionState.HAS_NEVER_RUN)
for a in self._action_service.query_actions_all(filters=[f1, f2]):
error_msg = 'A prior action (id=%s) in this workflow instance failed' % action.id
self._action_service.update_action_state(a, ActionState.SKIPPED, error_msg)
callbacks.append(lambda: self._emailer.send_workflow_failed_email(wf, wfi))
else:
self._datastore_service.update_datastore_state(datastore, DatastoreState.INACTIVE)
else:
if wfi and action.data.last_in_workflow:
self._handle_complete_workflow(callbacks, wf, wfi, wfid)
elif state == ActionState.COMPLETED:
if action.data.on_success_email:
callbacks.append(lambda: self._emailer.send_action_completed_email(action, datastore))
if wfi and action.data.last_in_workflow:
self._handle_complete_workflow(callbacks, wf, wfi, wfid)
finally:
for f in callbacks:
f()
# All actions in a wf are kicked off in batch at once
# if try_next_action:
# self._trigger_proxy.try_next_action({'datastore_id': datastore.id, 'log_info': message.get('log_info')})
def _handle_complete_workflow(self, callbacks, wf, wfi, wfid):
self._workflow_service.update_workflow_instance_state(wfi, WorkflowInstanceState.COMPLETED)
_logger.info("Trigger Workflow completion: id={wfid}".format(wfid=wfid))
self._trigger_proxy.trigger_workflow_completion(wfid)
self._trigger_subscription_evaluations(wfi.data.trigger_id)
if wf.data.on_success_email:
callbacks.append(lambda: self._emailer.send_workflow_completed_email(wf, wfi))
def _trigger_subscription_evaluations(self, trigger_id):
if not trigger_id:
return
trigger = self._trigger_service.get_trigger(trigger_id, raise_when_missing=False)
if not trigger:
return
if trigger.data.trigger_type_name == subscription_batch_trigger.name:
sub = self._subscription_service.get_subscription(trigger.data.args['subscription_id'], raise_when_missing=False)
if sub and not sub.data.nudge_id:
self._trigger_proxy.trigger_subscription_evaluation(trigger.id)
if trigger.data.trigger_type_name == super_trigger.name:
for ctid in trigger.data.args['completed_trigger_ids']:
self._trigger_subscription_evaluations(ctid)
| 56.342105
| 177
| 0.682765
|
43da7c2f1fed6ff3087fd9b45988ffb8c9180335
| 4,767
|
py
|
Python
|
src/deploy/auto_common/constants.py
|
kamlesh6808/JetPack
|
54871b307cc7385939dd89f9bb5e9ed9bb3036fe
|
[
"Apache-2.0"
] | 31
|
2017-11-14T02:21:18.000Z
|
2022-02-16T07:28:54.000Z
|
src/deploy/auto_common/constants.py
|
kamlesh6808/JetPack
|
54871b307cc7385939dd89f9bb5e9ed9bb3036fe
|
[
"Apache-2.0"
] | 324
|
2018-01-10T16:52:25.000Z
|
2021-09-23T19:02:50.000Z
|
src/deploy/auto_common/constants.py
|
kamlesh6808/JetPack
|
54871b307cc7385939dd89f9bb5e9ed9bb3036fe
|
[
"Apache-2.0"
] | 215
|
2017-11-01T15:50:16.000Z
|
2022-02-16T07:28:56.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2015-2021 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DPDK_K = "dpdk"
BOTH = "both"
OVS_DPDK = "OvsDpdk"
SRIOV_K = "sriov"
SRIOV = "Sriov"
NFV_TYPE_MAP = {DPDK_K: OVS_DPDK, SRIOV_K: SRIOV,
BOTH: BOTH}
DPDK_SERVICES = ["ComputeNeutronOvsDpdk", "OvsDpdkNetcontrold", "Rear"]
SRIOV_SERVICES = ["NeutronSriovAgent", "NeutronSriovHostConfig", "Rear"]
CTLPLANE_BRIDGE = "br-ctlplane"
PUBLIC_API_IF = "enp1s0"
PROVISIONING_IF = "enp2s0"
MANAGEMENT_IF = "enp3s0"
PRIVATE_API_IF = "enp4s0"
FIRST_BOOT = "first-boot"
SITE_NAME = "site-name"
OVERRIDES = "overrides"
CONTROL_PLANE_EXPORT = "control-plane-export"
TEMPEST_CONF = 'tempest.conf'
OVERCLOUD_PATH = 'overcloud'
OVERCLOUD_ENVS_PATH = OVERCLOUD_PATH + '/environments'
EDGE_COMMON_PATH = "edge_common"
STAGING_PATH = '/deployment_staging'
STAGING_TEMPLATES_PATH = STAGING_PATH + '/templates'
NIC_CONFIGS = 'nic-configs'
IMAGES_ENV = 'images-env'
CONTAINERS_PREPARE_PARAM = 'containers-prepare-parameter'
STAGING_NIC_CONFIGS = STAGING_TEMPLATES_PATH + '/' + NIC_CONFIGS
NIC_ENV = 'nic_environment'
NODE_PLACEMENT = 'node-placement'
NEUTRON_OVS = 'neutron-ovs'
DELL_ENV = 'dell-environment'
NET_ENV = 'network-environment'
INSTACKENV = 'instackenv'
STATIC_IP_ENV = 'static-ip-environment'
STATIC_VIP_ENV = 'static-vip-environment'
ROLES_DATA = 'roles_data'
NETWORK_DATA = 'network_data'
NET_ISO = 'network-isolation'
CONTROLLER = 'controller'
NEUTRON_OVS_DPDK = "neutron-ovs-dpdk"
NEUTRON_SRIOV = "neutron-sriov"
DEF_COMPUTE_ROLE_FILE = 'DistributedCompute.yaml'
DEF_COMPUTE_REMOTE_PATH = ('roles/{}'.format(DEF_COMPUTE_ROLE_FILE))
CONTROL_PLANE_NET = ('ControlPlane', "ctlplane")
INTERNAL_API_NET = ('InternalApi', 'internal_api')
STORAGE_NET = ('Storage', 'storage')
TENANT_NET = ('Tenant', 'tenant')
EXTERNAL_NET = ('External', 'external')
EDGE_NETWORKS = (INTERNAL_API_NET, STORAGE_NET,
TENANT_NET, EXTERNAL_NET)
EDGE_VLANS = ["TenantNetworkVlanID", "InternalApiNetworkVlanID",
"StorageNetworkVlanID"]
# Jinja2 template constants
J2_EXT = '.j2.yaml'
NIC_ENV_EDGE_J2 = NIC_ENV + "_edge" + J2_EXT
EDGE_COMPUTE_J2 = 'compute_edge' + J2_EXT
CONTROLLER_J2 = CONTROLLER + J2_EXT
NETWORK_DATA_J2 = NETWORK_DATA + J2_EXT
NETWORK_ENV_EDGE_J2 = NET_ENV + "-edge" + J2_EXT
DELL_ENV_EDGE_J2 = DELL_ENV + "-edge" + J2_EXT
STATIC_IP_ENV_EDGE_J2 = STATIC_IP_ENV + "-edge" + J2_EXT
NODE_PLACEMENT_EDGE_J2 = NODE_PLACEMENT + "-edge" + J2_EXT
ROLES_DATA_EDGE_J2 = ROLES_DATA + "_edge" + J2_EXT
NET_ISO_EDGE_J2 = NET_ISO + "-edge" + J2_EXT
SITE_NAME_EDGE_J2 = SITE_NAME + "-edge" + J2_EXT
SITE_NAME_J2 = SITE_NAME + J2_EXT
OVERRIDES_EDGE_J2 = OVERRIDES + "-edge" + J2_EXT
NEUTRON_OVS_DPDK_EDGE_J2 = NEUTRON_OVS_DPDK + "-edge" + J2_EXT
NEUTRON_SRIOV_EDGE_J2 = NEUTRON_SRIOV + "-edge" + J2_EXT
EC2_IPCIDR = '169.254.169.254/32'
EC2_PUBLIC_IPCIDR_PARAM = 'EC2MetadataPublicIpCidr'
NWM_ROUTE_CMD = ("nmcli connection modify {dev} {add_rem}ipv4.routes "
"\"{cidr} {gw}\"")
NWM_UP_CMD = "nmcli connection load {dev} && exec nmcli device reapply {dev}"
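# Illustrative only (hypothetical device/route values): NWM_ROUTE_CMD.format(dev='eth0', add_rem='+', cidr='10.0.0.0/24', gw='10.0.0.1')
# renders as: nmcli connection modify eth0 +ipv4.routes "10.0.0.0/24 10.0.0.1"; pass add_rem='-' to remove the route instead.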
LEGACY_DEL_ROUTE_CMD = ("sudo sed -i -e '/{cidr_esc} via {gw} dev {dev}/d' "
"/etc/sysconfig/network-scripts/route-{dev}; "
"sudo ip route del {cidr} via {gw} dev {dev}")
LEGACY_ROUTE_CMD = ("sudo echo \"{cidr} via {gw} dev {dev}\" >> "
"/etc/sysconfig/network-scripts/route-{dev}")
LEGACY_SSH_ROUTE_CMD = ("echo \"{cidr} via {gw} dev {dev}\" | sudo tee -a "
"/etc/sysconfig/network-scripts/route-{dev}")
ROUTE_UP_CMD = "sudo /etc/sysconfig/network-scripts/ifup-routes {dev}"
BR_DOWN_CMD = "sudo /etc/sysconfig/network-scripts/ifdown-ovs ifcfg-{dev}"
BR_UP_CMD = "sudo /etc/sysconfig/network-scripts/ifup-ovs ifcfg-{dev}"
IF_DOWN_CMD = "sudo /etc/sysconfig/network-scripts/ifdown {dev}"
IF_UP_CMD = "sudo /etc/sysconfig/network-scripts/ifup {dev}"
UNDERCLOUD_INSTALL_CMD = "openstack undercloud install"
MGMT_BRIDGE = "br-mgmt"
PROV_BRIDGE = "br-prov"
CONTAINER_IMAGE_PREPARE_CMD = "sudo openstack tripleo container image prepare"
STACK_SHOW_CMD = ("openstack stack show -c stack_name -c stack_status "
"-c creation_time -f json {stack}")
| 41.452174
| 78
| 0.730019
|
052acf67cb929d4ad908e7e39fdd76b00366a8f0
| 6,580
|
py
|
Python
|
plugins/winrm/logic.py
|
Tikido/tikido_agent
|
b2ae70614601fe0fcf5df8784f8694aab0b29ece
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/winrm/logic.py
|
Tikido/tikido_agent
|
b2ae70614601fe0fcf5df8784f8694aab0b29ece
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/winrm/logic.py
|
Tikido/tikido_agent
|
b2ae70614601fe0fcf5df8784f8694aab0b29ece
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import logging
log = logging.getLogger(__name__)
import winrm
from ..agent_core import IAgentCore
class Logic(IAgentCore):
def __init__(self, options):
super().__init__(options)
try:
self.sess = winrm.Session(self.host + (':' + self.port if self.port else ''),
auth=(self.winuser, self.winuserpass),
transport='ssl' if self.ssl == '1' else '',
server_cert_validation='ignore')
# rslt = self.sess.run_cmd('whoami')
# if rslt.status_code == 0:
# log.info(rslt.std_out.decode('utf8'))
# else:
# log.warning(rslt.std_err.decode('utf8'))
except Exception as e:
log.exception('cannot connect to Windows host - %s' % str(e))
raise self.PluginException('cannot connect to Windows host - %s' % str(e))
def whoami(self): # web
rslt = self.sess.run_cmd('whoami')
if rslt.status_code == 0:
log.debug(rslt.std_out.decode('utf8'))
return dict(result='OK', value='success',
vars={}
)
else:
log.warning(rslt.std_err.decode('utf8'))
return dict(result='error', value=rslt.std_err.decode('utf8'), vars={})
def _get_ram(self):
ps_script = """$RAM = Get-WmiObject Win32_ComputerSystem
$MB = 1048576
"Installed Memory: " + [int]($RAM.TotalPhysicalMemory /$MB) + " MB" """
return self.sess.run_ps(ps_script)
def _get_cpu(self):
# TODO more at https://www.datadoghq.com/blog/monitoring-windows-server-2012/
# ps_script = """$ProcessorPercentage = (Get-WmiObject Win32_PerfFormattedData_PerfOS_Processor -filter "Name='_Total'").PercentProcessorTime
# Write-Output "$ProcessorPercentage" """
ps_script = """(Get-WmiObject Win32_PerfFormattedData_PerfOS_Processor -filter "Name='_Total'").PercentProcessorTime"""
# print(self.sess.run_ps( """Get-WmiObject -Query "Select InterruptsPersec from Win32_PerfFormattedData_PerfOS_Processor where Name='_Total'" """).std_out.decode('utf8'))
return int(self.sess.run_ps(ps_script).std_out) # .decode('utf8')
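# _get_perf_metrics returns the counters in the order the script below emits them: CPU busy %,
# available memory (MB), free-space % on C: and D:, disk transfers/sec on C: and D:, and total
# network bytes/sec; any field the query leaves empty comes back as None.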
def _get_perf_metrics(self):
ps_script = """$ProcessorPercentage = (Get-WmiObject Win32_PerfFormattedData_PerfOS_Processor -filter "Name='_Total'").PercentProcessorTime
$AvailableMBytes = (Get-WmiObject Win32_PerfFormattedData_PerfOS_Memory).AvailableMBytes
$PercentFreeSpaceDiskC = (Get-WmiObject Win32_PerfFormattedData_PerfDisk_LogicalDisk -filter "Name='C:'").PercentFreeSpace
$PercentFreeSpaceDiskD = (Get-WmiObject Win32_PerfFormattedData_PerfDisk_LogicalDisk -filter "Name='D:'").PercentFreeSpace
$DiskTransfersPersecDiskC = (Get-WmiObject Win32_PerfFormattedData_PerfDisk_LogicalDisk -filter "Name='C:'").DiskTransfersPersec
$DiskTransfersPersecDiskD = (Get-WmiObject Win32_PerfFormattedData_PerfDisk_LogicalDisk -filter "Name='D:'").DiskTransfersPersec
$NetBytesTotalPersec = (Get-WmiObject Win32_PerfFormattedData_Tcpip_NetworkInterface )[0].BytesTotalPersec
Write-Output "$ProcessorPercentage" '|' $AvailableMBytes '|' $PercentFreeSpaceDiskC '|' $PercentFreeSpaceDiskD '|' $DiskTransfersPersecDiskC '|' $DiskTransfersPersecDiskD '|' $NetBytesTotalPersec"""
# print(self.sess.run_ps( """Get-WmiObject -Query "Select InterruptsPersec from Win32_PerfFormattedData_PerfOS_Processor where Name='_Total'" """).std_out.decode('utf8'))
return [int(x) if x else None for x in (_.strip() for _ in self.sess.run_ps(ps_script).std_out.decode('ascii').split('|'))]
def service(self): # monitor and report if service is NOT running
# (get-service "themes").Status
# WMIC Service WHERE "Name = 'ServiceName'" GET Started
# or WMIC Service WHERE "Name = 'ServiceName'" GET ProcessId (ProcessId will be zero if service isn't started)
# for non-English Windows? TODO
#
# call wmic /locale:ms_409 service where (name="wsearch") get state /value | findstr State=Running
# if %ErrorLevel% EQU 0 (
# echo Running
# ) else (
# echo Not running
# )
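# Typical `sc query <name>` output (English locale) puts the STATE line third, e.g. "STATE : 4  RUNNING";
# data[2] below relies on that layout.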
data = self.sess.run_cmd("sc", ('query', self.svc)).std_out.decode('ascii').strip().split('\r\n')
is_running = 'RUNNING' in data[2].split(': ')[1]
return is_running
def run_cmd(self, command, command_args):
result = self.sess.run_cmd(command, args=command_args)
return dict(std_out=result.std_out.decode('utf8'), std_err=result.std_err.decode('utf8'), status_code=result.status_code)
def run_cmd_stdout(self, command, command_args):
return self.sess.run_cmd(command, args=command_args).std_out.decode('utf8')
def run_ps(self, script):
result = self.sess.run_ps(script)
return dict(std_out=result.std_out.decode('utf8'), std_err=result.std_err.decode('utf8'), status_code=result.status_code)
if __name__ == '__main__':
logging.getLogger('').setLevel(logging.DEBUG)
log.setLevel(logging.DEBUG)
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
formatter = logging.Formatter('\n%(levelname)-8s %(name)-12s %(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
c = Logic(
{'host': '10.0.0.157', 'ssl': '1', 'winuser': 'root', 'port': '',
'winuserpass': 'root'}
)
c.pre_action()
c.pp(c.list_services())
0 / 0
# print(c.sess.run_cmd('whoami').std_out.decode('ascii'))
# log.warning('querying....')
# c.pp(c.list_event_logs())
#
# 0/0
# s = winrm.Session('10.0.0.130', auth=('root', 'root'))
# s = winrm.Session('10.211.55.7', auth=('root', 'root'))
s = winrm.Session('10.0.0.157', auth=('root', 'root'), transport='ssl', server_cert_validation='ignore')
# r = s.run_cmd('ipconfig', ['/all'])
# print(r.status_code)
#
# print(r.std_out.decode('utf8'))
# print()
# print(str(r.std_err))
########
r = s.run_cmd('whoami')
print(r.std_out.decode('utf8'))
while 1:
cmd = input('PS> ')
command_args = cmd.split(' ')
print((command_args[0], command_args[1:]))
r = s.run_cmd(command_args[0], command_args[1:])
print(r.status_code)
print(r.std_out.decode('utf8'))
| 45.068493
| 211
| 0.630091
|
d6534ce09d5872d6bdea17d3ba1e45d5ba17a501
| 5,028
|
py
|
Python
|
azfilebak/streamazure.py
|
chgeuer/python_backup_files
|
97263643d831ed8664e59c06311e5d6530afb831
|
[
"MIT"
] | 1
|
2021-07-07T14:59:16.000Z
|
2021-07-07T14:59:16.000Z
|
azfilebak/streamazure.py
|
chgeuer/python_backup_files
|
97263643d831ed8664e59c06311e5d6530afb831
|
[
"MIT"
] | 1
|
2021-06-01T22:37:28.000Z
|
2021-06-01T22:37:28.000Z
|
azfilebak/streamazure.py
|
chgeuer/python_backup_files
|
97263643d831ed8664e59c06311e5d6530afb831
|
[
"MIT"
] | 1
|
2018-09-05T15:39:06.000Z
|
2018-09-05T15:39:06.000Z
|
#!/usr/bin/env python2.7
#
# coding=utf-8
#
from __future__ import print_function
import sys
import json
import urllib2
import argparse
import os
import os.path
from azure.storage.blob import BlockBlobService
def printe(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def lazy_property(fn):
'''Decorator that makes a property lazy-evaluated.
'''
attr_name = '_lazy_' + fn.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
return _lazy_property
class BackupException(Exception):
pass
class AzureVMInstanceMetadata:
@staticmethod
def request_metadata(api_version="2017-12-01"):
url="http://169.254.169.254/metadata/instance?api-version={v}".format(v=api_version)
try:
return json.loads(urllib2.urlopen(urllib2.Request(url, None, {'metadata': 'true'})).read())
except Exception as e:
raise(BackupException("Failed to connect to Azure instance metadata endpoint {}:\n{}".format(url, e.message)))
@staticmethod
def test_data():
return '{{ "compute": {{ "name":"vm3728739", "tags":"storage_account_name:{};storage_account_key:{};fs_backup_interval_min:24h;fs_backup_interval_max:3d" }} }}'.format(
os.environ["SAMPLE_STORAGE_ACCOUNT_NAME"],os.environ["SAMPLE_STORAGE_ACCOUNT_KEY"]
)
@staticmethod
def create_instance():
#return AzureVMInstanceMetadata(lambda: (json.JSONDecoder()).decode(AzureVMInstanceMetadata.test_data()))
return AzureVMInstanceMetadata(lambda: AzureVMInstanceMetadata.request_metadata())
def __init__(self, req):
self.req = req
@lazy_property
def json(self):
return self.req()
def get_tags(self):
try:
tags_value = str(self.json['compute']['tags'])
if tags_value == None:
return dict()
return dict(kvp.split(":", 1) for kvp in (tags_value.split(";")))
except Exception as e:
raise(BackupException("Cannot parse tags value from instance metadata endpoint: {}".format(e.message)))
@property
def vm_name(self):
try:
return str(self.json["compute"]["name"])
except Exception:
raise(BackupException("Cannot read VM name from instance metadata endpoint"))
def client_and_container():
config = AzureVMInstanceMetadata.create_instance()
account_name=config.get_tags()["storage_account_name"]
account_key=config.get_tags()["storage_account_key"]
storage_client = BlockBlobService(account_name=account_name, account_key=account_key)
container_name = "backup"
return storage_client, container_name
def backup(args):
storage_client, container_name = client_and_container()
blob_name = args.backup
# printe("Backup to {}".format(storage_client.make_blob_url(container_name, blob_name)))
try:
if not storage_client.exists(container_name=container_name):
storage_client.create_container(container_name=container_name)
storage_client.create_blob_from_stream(
container_name=container_name,
blob_name=blob_name, stream=sys.stdin,
use_byte_buffer=True, max_connections=1)
except Exception as e:
raise BackupException(e.message)
def restore(args):
storage_client, container_name = client_and_container()
blob_name = args.restore
# printe("Restore from {}".format(storage_client.make_blob_url(container_name, blob_name)))
try:
storage_client.get_blob_to_stream(
container_name=container_name,
blob_name=blob_name, stream=sys.stdout,
max_connections=1)
except Exception as e:
raise BackupException(e.message)
def list_backups(args):
storage_client, container_name = client_and_container()
existing_blobs = []
marker = None
while True:
results = storage_client.list_blobs(
container_name=container_name,
marker=marker)
for blob in results:
existing_blobs.append(blob.name)
if results.next_marker:
marker = results.next_marker
else:
break
for blob in existing_blobs:
print("{}".format(blob))
def main():
parser = argparse.ArgumentParser()
commands = parser.add_argument_group("commands")
commands.add_argument("-b", "--backup", help="Perform backup")
commands.add_argument("-r", "--restore", help="Perform restore")
commands.add_argument("-l", "--list", help="List backups in storage", action="store_true")
args = parser.parse_args()
if args.backup:
backup(args)
elif args.restore:
restore(args)
elif args.list:
list_backups(args)
else:
parser.print_help()
sys.exit(1)
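# Illustrative invocations (hypothetical blob name; assumes an Azure VM whose tags provide storage_account_name/storage_account_key):
#   tar cz /data | ./streamazure.py --backup data.tar.gz
#   ./streamazure.py --restore data.tar.gz > restored.tar.gz
#   ./streamazure.py --list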
if __name__ == '__main__':
try:
main()
except BackupException as be:
printe(be.message)
| 32.43871
| 176
| 0.669053
|
46e5d60bfb226c737fa2ba898fe81e25044d28d2
| 1,035
|
py
|
Python
|
sa/profiles/Nateks/FlexGainACE16/get_version.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 84
|
2017-10-22T11:01:39.000Z
|
2022-02-27T03:43:48.000Z
|
sa/profiles/Nateks/FlexGainACE16/get_version.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 22
|
2017-12-11T07:21:56.000Z
|
2021-09-23T02:53:50.000Z
|
sa/profiles/Nateks/FlexGainACE16/get_version.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 23
|
2017-12-06T06:59:52.000Z
|
2022-02-24T00:02:25.000Z
|
# ---------------------------------------------------------------------
# Nateks.FlexGainACE16.get_version
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetversion import IGetVersion
from noc.core.text import parse_kv
class Script(BaseScript):
name = "Nateks.FlexGainACE16.get_version"
cache = True
interface = IGetVersion
def execute(self):
v = self.cli("show sysinfo", cached=True)
pkv = parse_kv(
{"1.hardware version": "hw", "2.software version": "sw", "3.serial number": "serial"}, v
)
return {
"vendor": "Nateks",
"platform": "ACE",
"version": pkv["sw"],
"attributes": {"HW version": pkv["hw"], "Serial Number": pkv["serial"]},
}
| 33.387097
| 101
| 0.467633
|
6b57703e8f8a88c513f9bb58242eb671e2d276cd
| 1,405
|
py
|
Python
|
pyrl/config.py
|
abesto/pyrl
|
34eb9843217ed5b557bff99ed66ef46b49b5c295
|
[
"MIT"
] | 10
|
2019-12-03T03:59:41.000Z
|
2021-04-13T11:52:20.000Z
|
pyrl/config.py
|
abesto/pyrl
|
34eb9843217ed5b557bff99ed66ef46b49b5c295
|
[
"MIT"
] | 1
|
2021-04-06T03:28:02.000Z
|
2021-05-31T09:34:48.000Z
|
pyrl/config.py
|
abesto/pyrl
|
34eb9843217ed5b557bff99ed66ef46b49b5c295
|
[
"MIT"
] | 1
|
2019-12-03T02:50:02.000Z
|
2019-12-03T02:50:02.000Z
|
#!/usr/bin/env python
import os
from dataclasses import dataclass
from pathlib import Path
from appdirs import user_data_dir
from tcod.color import Color
SCREEN_WIDTH = 80
SCREEN_HEIGHT = 50
ASSETS_DIR = Path("assets")
FONT_PATH = ASSETS_DIR / "arial10x10.png"
MAIN_MENU_BACKGROUND_PATH = ASSETS_DIR / "menu_background1.png"
BAR_WIDTH = 20
PANEL_HEIGHT = 7
PANEL_Y = SCREEN_HEIGHT - PANEL_HEIGHT
MESSAGE_X = BAR_WIDTH + 2
MESSAGE_WIDTH = SCREEN_WIDTH - BAR_WIDTH - 2
MESSAGE_HEIGHT = PANEL_HEIGHT - 1
MAP_WIDTH = SCREEN_WIDTH
MAP_HEIGHT = SCREEN_HEIGHT - PANEL_HEIGHT
INVENTORY_WIDTH = 50
ROOM_MAX_SIZE = 10
ROOM_MIN_SIZE = 6
MAX_ROOMS = 30
FOV_ALGORITHM = 0
FOV_LIGHT_WALLS = True
FOV_RADIUS = 10
DATADIR = Path(user_data_dir("pyrl", "abesto"))
SAVEFILE = DATADIR / "save.dat"
@dataclass
class Theme:
dark_wall: Color
dark_ground: Color
light_wall: Color
light_ground: Color
def background_color(self, wall: bool, visible: bool) -> Color:
if visible:
if wall:
return self.light_wall
else:
return self.light_ground
else:
if wall:
return self.dark_wall
else:
return self.dark_ground
theme = Theme(
dark_wall=Color(0, 0, 100),
dark_ground=Color(50, 50, 150),
light_wall=Color(130, 110, 50),
light_ground=Color(200, 180, 50),
)
| 20.661765
| 67
| 0.681139
|
ec7c152fb8356067b46958012d3bb99c02b20c23
| 31,870
|
py
|
Python
|
cinder/tests/api/v2/test_limits.py
|
hopem/cinder
|
7df656ff0be9fef34a4e19f7b83a0cae554db1e7
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/api/v2/test_limits.py
|
hopem/cinder
|
7df656ff0be9fef34a4e19f7b83a0cae554db1e7
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/api/v2/test_limits.py
|
hopem/cinder
|
7df656ff0be9fef34a4e19f7b83a0cae554db1e7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests dealing with HTTP rate-limiting.
"""
import httplib
import StringIO
from lxml import etree
import webob
from xml.dom import minidom
from cinder.api.v2 import limits
from cinder.api import views
from cinder.api import xmlutil
import cinder.context
from cinder.openstack.common import jsonutils
from cinder import test
TEST_LIMITS = [
limits.Limit("GET", "/delayed", "^/delayed", 1, limits.PER_MINUTE),
limits.Limit("POST", "*", ".*", 7, limits.PER_MINUTE),
limits.Limit("POST", "/volumes", "^/volumes", 3, limits.PER_MINUTE),
limits.Limit("PUT", "*", "", 10, limits.PER_MINUTE),
limits.Limit("PUT", "/volumes", "^/volumes", 5, limits.PER_MINUTE),
]
NS = {
'atom': 'http://www.w3.org/2005/Atom',
'ns': 'http://docs.openstack.org/common/api/v1.0',
}
class BaseLimitTestSuite(test.TestCase):
"""Base test suite which provides relevant stubs and time abstraction."""
def setUp(self):
super(BaseLimitTestSuite, self).setUp()
self.time = 0.0
self.stubs.Set(limits.Limit, "_get_time", self._get_time)
self.absolute_limits = {}
def stub_get_project_quotas(context, project_id, usages=True):
return dict((k, dict(limit=v))
for k, v in self.absolute_limits.items())
self.stubs.Set(cinder.quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
def _get_time(self):
"""Return the "time" according to this test suite."""
return self.time
class LimitsControllerTest(BaseLimitTestSuite):
"""
Tests for `limits.LimitsController` class.
"""
def setUp(self):
"""Run before each test."""
super(LimitsControllerTest, self).setUp()
self.controller = limits.create_resource()
def _get_index_request(self, accept_header="application/json"):
"""Helper to set routing arguments."""
request = webob.Request.blank("/")
request.accept = accept_header
request.environ["wsgiorg.routing_args"] = (None, {
"action": "index",
"controller": "",
})
context = cinder.context.RequestContext('testuser', 'testproject')
request.environ["cinder.context"] = context
return request
def _populate_limits(self, request):
"""Put limit info into a request."""
_limits = [
limits.Limit("GET", "*", ".*", 10, 60).display(),
limits.Limit("POST", "*", ".*", 5, 60 * 60).display(),
limits.Limit("GET", "changes-since*", "changes-since",
5, 60).display(),
]
request.environ["cinder.limits"] = _limits
return request
def test_empty_index_json(self):
"""Test getting empty limit details in JSON."""
request = self._get_index_request()
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def test_index_json(self):
"""Test getting limit details in JSON."""
request = self._get_index_request()
request = self._populate_limits(request)
self.absolute_limits = {
'gigabytes': 512,
'volumes': 5,
}
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [
{
"regex": ".*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
{
"verb": "POST",
"next-available": "1970-01-01T00:00:00Z",
"unit": "HOUR",
"value": 5,
"remaining": 5,
},
],
},
{
"regex": "changes-since",
"uri": "changes-since*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 5,
"remaining": 5,
},
],
},
],
"absolute": {"maxTotalVolumeGigabytes": 512,
"maxTotalVolumes": 5, },
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def _populate_limits_diff_regex(self, request):
"""Put limit info into a request."""
_limits = [
limits.Limit("GET", "*", ".*", 10, 60).display(),
limits.Limit("GET", "*", "*.*", 10, 60).display(),
]
request.environ["cinder.limits"] = _limits
return request
def test_index_diff_regex(self):
"""Test getting limit details in JSON."""
request = self._get_index_request()
request = self._populate_limits_diff_regex(request)
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [
{
"regex": ".*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
],
},
{
"regex": "*.*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
],
},
],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def _test_index_absolute_limits_json(self, expected):
request = self._get_index_request()
response = request.get_response(self.controller)
body = jsonutils.loads(response.body)
self.assertEqual(expected, body['limits']['absolute'])
def test_index_ignores_extra_absolute_limits_json(self):
self.absolute_limits = {'unknown_limit': 9001}
self._test_index_absolute_limits_json({})
class TestLimiter(limits.Limiter):
pass
class LimitMiddlewareTest(BaseLimitTestSuite):
"""
Tests for the `limits.RateLimitingMiddleware` class.
"""
@webob.dec.wsgify
def _empty_app(self, request):
"""Do-nothing WSGI app."""
pass
def setUp(self):
"""Prepare middleware for use through fake WSGI app."""
super(LimitMiddlewareTest, self).setUp()
_limits = '(GET, *, .*, 1, MINUTE)'
self.app = limits.RateLimitingMiddleware(self._empty_app, _limits,
"%s.TestLimiter" %
self.__class__.__module__)
def test_limit_class(self):
"""Test that middleware selected correct limiter class."""
assert isinstance(self.app._limiter, TestLimiter)
def test_good_request(self):
"""Test successful GET request through middleware."""
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
def test_limited_request_json(self):
"""Test a rate-limited (413) GET request through middleware."""
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(response.status_int, 413)
self.assertTrue('Retry-After' in response.headers)
retry_after = int(response.headers['Retry-After'])
self.assertAlmostEqual(retry_after, 60, 1)
body = jsonutils.loads(response.body)
expected = "Only 1 GET request(s) can be made to * every minute."
value = body["overLimitFault"]["details"].strip()
self.assertEqual(value, expected)
def test_limited_request_xml(self):
"""Test a rate-limited (413) response as XML"""
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
request = webob.Request.blank("/")
request.accept = "application/xml"
response = request.get_response(self.app)
self.assertEqual(response.status_int, 413)
root = minidom.parseString(response.body).childNodes[0]
expected = "Only 1 GET request(s) can be made to * every minute."
details = root.getElementsByTagName("details")
self.assertEqual(details.length, 1)
value = details.item(0).firstChild.data.strip()
self.assertEqual(value, expected)
class LimitTest(BaseLimitTestSuite):
"""
Tests for the `limits.Limit` class.
"""
def test_GET_no_delay(self):
"""Test a limit handles 1 GET per second."""
limit = limits.Limit("GET", "*", ".*", 1, 1)
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
self.assertEqual(0, limit.next_request)
self.assertEqual(0, limit.last_request)
def test_GET_delay(self):
"""Test two calls to 1 GET per second limit."""
limit = limits.Limit("GET", "*", ".*", 1, 1)
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
delay = limit("GET", "/anything")
self.assertEqual(1, delay)
self.assertEqual(1, limit.next_request)
self.assertEqual(0, limit.last_request)
self.time += 4
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
self.assertEqual(4, limit.next_request)
self.assertEqual(4, limit.last_request)
class ParseLimitsTest(BaseLimitTestSuite):
"""
Tests for the default limits parser in the in-memory
`limits.Limiter` class.
"""
def test_invalid(self):
"""Test that parse_limits() handles invalid input correctly."""
self.assertRaises(ValueError, limits.Limiter.parse_limits,
';;;;;')
def test_bad_rule(self):
"""Test that parse_limits() handles bad rules correctly."""
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'GET, *, .*, 20, minute')
def test_missing_arg(self):
"""Test that parse_limits() handles missing args correctly."""
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, 20)')
def test_bad_value(self):
"""Test that parse_limits() handles bad values correctly."""
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, foo, minute)')
def test_bad_unit(self):
"""Test that parse_limits() handles bad units correctly."""
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, 20, lightyears)')
def test_multiple_rules(self):
"""Test that parse_limits() handles multiple rules correctly."""
try:
l = limits.Limiter.parse_limits('(get, *, .*, 20, minute);'
'(PUT, /foo*, /foo.*, 10, hour);'
'(POST, /bar*, /bar.*, 5, second);'
'(Say, /derp*, /derp.*, 1, day)')
except ValueError as e:
assert False, str(e)
# Make sure the number of returned limits are correct
self.assertEqual(len(l), 4)
# Check all the verbs...
expected = ['GET', 'PUT', 'POST', 'SAY']
self.assertEqual([t.verb for t in l], expected)
# ...the URIs...
expected = ['*', '/foo*', '/bar*', '/derp*']
self.assertEqual([t.uri for t in l], expected)
# ...the regexes...
expected = ['.*', '/foo.*', '/bar.*', '/derp.*']
self.assertEqual([t.regex for t in l], expected)
# ...the values...
expected = [20, 10, 5, 1]
self.assertEqual([t.value for t in l], expected)
# ...and the units...
expected = [limits.PER_MINUTE, limits.PER_HOUR,
limits.PER_SECOND, limits.PER_DAY]
self.assertEqual([t.unit for t in l], expected)
class LimiterTest(BaseLimitTestSuite):
"""
Tests for the in-memory `limits.Limiter` class.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
userlimits = {'user:user3': ''}
self.limiter = limits.Limiter(TEST_LIMITS, **userlimits)
def _check(self, num, verb, url, username=None):
"""Check and yield results from checks."""
for x in xrange(num):
yield self.limiter.check_for_delay(verb, url, username)[0]
def _check_sum(self, num, verb, url, username=None):
"""Check and sum results from checks."""
results = self._check(num, verb, url, username)
return sum(item for item in results if item)
def test_no_delay_GET(self):
"""
Simple test to ensure no delay on a single call for a limit verb we
didn't set.
"""
delay = self.limiter.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None))
def test_no_delay_PUT(self):
"""
Simple test to ensure no delay on a single call for a known limit.
"""
delay = self.limiter.check_for_delay("PUT", "/anything")
self.assertEqual(delay, (None, None))
def test_delay_PUT(self):
"""
Ensure the 11th PUT will result in a delay of 6.0 seconds until
the next request will be granted.
"""
expected = [None] * 10 + [6.0]
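# TEST_LIMITS allows 10 PUTs per minute, so the 11th request must wait 60 / 10 = 6.0 seconds.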
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_POST(self):
"""
Ensure the 8th POST will result in a delay of 60/7 (~8.57) seconds until
the next request will be granted.
"""
expected = [None] * 7
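# The POST limit in TEST_LIMITS is 7 per minute, hence the 60.0 / 7.0 expectation checked below.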
results = list(self._check(7, "POST", "/anything"))
self.assertEqual(expected, results)
expected = 60.0 / 7.0
results = self._check_sum(1, "POST", "/anything")
self.failUnlessAlmostEqual(expected, results, 8)
def test_delay_GET(self):
"""
Ensure the 11th GET will result in NO delay.
"""
expected = [None] * 11
results = list(self._check(11, "GET", "/anything"))
self.assertEqual(expected, results)
def test_delay_PUT_volumes(self):
"""
Ensure PUT on /volumes limits at 5 requests, and PUT elsewhere is still
OK after 5 requests...but then after 11 total requests, PUT limiting
kicks in.
"""
# First 6 requests on PUT /volumes
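# PUT /volumes is capped at 5 per minute (60 / 5 = 12.0 s delay), while the generic PUT limit of
# 10 per minute counts every PUT, including these.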
expected = [None] * 5 + [12.0]
results = list(self._check(6, "PUT", "/volumes"))
self.assertEqual(expected, results)
# Next 5 request on PUT /anything
expected = [None] * 4 + [6.0]
results = list(self._check(5, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_PUT_wait(self):
"""
Ensure after hitting the limit and then waiting for the correct
amount of time, the limit will be lifted.
"""
expected = [None] * 10 + [6.0]
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
# Advance time
self.time += 6.0
expected = [None, 6.0]
results = list(self._check(2, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_multiple_delays(self):
"""
Ensure multiple requests still get a delay.
"""
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything"))
self.assertEqual(expected, results)
self.time += 1.0
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_user_limit(self):
"""
Test user-specific limits.
"""
self.assertEqual(self.limiter.levels['user3'], [])
def test_multiple_users(self):
"""
Tests involving multiple users.
"""
# User1
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
# User2
expected = [None] * 10 + [6.0] * 5
results = list(self._check(15, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User3
expected = [None] * 20
results = list(self._check(20, "PUT", "/anything", "user3"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [4.0] * 5
results = list(self._check(5, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
class WsgiLimiterTest(BaseLimitTestSuite):
"""
Tests for `limits.WsgiLimiter` class.
"""
def setUp(self):
"""Run before each test."""
super(WsgiLimiterTest, self).setUp()
self.app = limits.WsgiLimiter(TEST_LIMITS)
def _request_data(self, verb, path):
"""Get data describing a limit request verb/path."""
return jsonutils.dumps({"verb": verb, "path": path})
def _request(self, verb, url, username=None):
"""Make sure that POSTing to the given url causes the given username
to perform the given action. Make the internal rate limiter return a
delay and make sure that the WSGI app returns the correct response.
"""
if username:
request = webob.Request.blank("/%s" % username)
else:
request = webob.Request.blank("/")
request.method = "POST"
request.body = self._request_data(verb, url)
response = request.get_response(self.app)
if "X-Wait-Seconds" in response.headers:
self.assertEqual(response.status_int, 403)
return response.headers["X-Wait-Seconds"]
self.assertEqual(response.status_int, 204)
def test_invalid_methods(self):
"""Only POSTs should work."""
for method in ["GET", "PUT", "DELETE", "HEAD", "OPTIONS"]:
request = webob.Request.blank("/", method=method)
response = request.get_response(self.app)
self.assertEqual(response.status_int, 405)
def test_good_url(self):
delay = self._request("GET", "/something")
self.assertEqual(delay, None)
def test_escaping(self):
delay = self._request("GET", "/something/jump%20up")
self.assertEqual(delay, None)
def test_response_to_delays(self):
delay = self._request("GET", "/delayed")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed")
self.assertEqual(delay, '60.00')
def test_response_to_delays_usernames(self):
delay = self._request("GET", "/delayed", "user1")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed", "user2")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed", "user1")
self.assertEqual(delay, '60.00')
delay = self._request("GET", "/delayed", "user2")
self.assertEqual(delay, '60.00')
class FakeHttplibSocket(object):
"""
Fake `httplib.HTTPResponse` replacement.
"""
def __init__(self, response_string):
"""Initialize new `FakeHttplibSocket`."""
self._buffer = StringIO.StringIO(response_string)
def makefile(self, _mode, _other):
"""Returns the socket's internal buffer."""
return self._buffer
class FakeHttplibConnection(object):
"""
Fake `httplib.HTTPConnection`.
"""
def __init__(self, app, host):
"""
Initialize `FakeHttplibConnection`.
"""
self.app = app
self.host = host
def request(self, method, path, body="", headers=None):
"""
Requests made via this connection actually get translated and routed
into our WSGI app; we then wait for the response and turn it back into
an `httplib.HTTPResponse`.
"""
if not headers:
headers = {}
req = webob.Request.blank(path)
req.method = method
req.headers = headers
req.host = self.host
req.body = body
resp = str(req.get_response(self.app))
resp = "HTTP/1.0 %s" % resp
sock = FakeHttplibSocket(resp)
self.http_response = httplib.HTTPResponse(sock)
self.http_response.begin()
def getresponse(self):
"""Return our generated response from the request."""
return self.http_response
def wire_HTTPConnection_to_WSGI(host, app):
"""Monkeypatches HTTPConnection so that if you try to connect to host, you
are instead routed straight to the given WSGI app.
After calling this method, when any code calls
httplib.HTTPConnection(host)
the connection object will be a fake. Its requests will be sent directly
to the given WSGI app rather than through a socket.
Code connecting to hosts other than host will not be affected.
This method may be called multiple times to map different hosts to
different apps.
This method returns the original HTTPConnection object, so that the caller
can restore the default HTTPConnection interface (for all hosts).
"""
class HTTPConnectionDecorator(object):
"""Wraps the real HTTPConnection class so that when you instantiate
the class you might instead get a fake instance.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
def __call__(self, connection_host, *args, **kwargs):
if connection_host == host:
return FakeHttplibConnection(app, host)
else:
return self.wrapped(connection_host, *args, **kwargs)
oldHTTPConnection = httplib.HTTPConnection
httplib.HTTPConnection = HTTPConnectionDecorator(httplib.HTTPConnection)
return oldHTTPConnection
class WsgiLimiterProxyTest(BaseLimitTestSuite):
"""
Tests for the `limits.WsgiLimiterProxy` class.
"""
def setUp(self):
"""
Do some nifty HTTP/WSGI magic which allows for WSGI to be called
directly by something like the `httplib` library.
"""
super(WsgiLimiterProxyTest, self).setUp()
self.app = limits.WsgiLimiter(TEST_LIMITS)
self.oldHTTPConnection = (
wire_HTTPConnection_to_WSGI("169.254.0.1:80", self.app))
self.proxy = limits.WsgiLimiterProxy("169.254.0.1:80")
def test_200(self):
"""Successful request test."""
delay = self.proxy.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None))
def test_403(self):
"""Forbidden request test."""
delay = self.proxy.check_for_delay("GET", "/delayed")
self.assertEqual(delay, (None, None))
delay, error = self.proxy.check_for_delay("GET", "/delayed")
error = error.strip()
expected = ("60.00", "403 Forbidden\n\nOnly 1 GET request(s) can be "
"made to /delayed every minute.")
self.assertEqual((delay, error), expected)
def tearDown(self):
# restore original HTTPConnection object
httplib.HTTPConnection = self.oldHTTPConnection
super(WsgiLimiterProxyTest, self).tearDown()
class LimitsViewBuilderTest(test.TestCase):
def setUp(self):
super(LimitsViewBuilderTest, self).setUp()
self.view_builder = views.limits.ViewBuilder()
self.rate_limits = [{"URI": "*",
"regex": ".*",
"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"resetTime": 1311272226},
{"URI": "*/volumes",
"regex": "^/volumes",
"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"resetTime": 1311272226}]
self.absolute_limits = {"metadata_items": 1,
"injected_files": 5,
"injected_file_content_bytes": 5}
def test_build_limits(self):
tdate = "2011-07-21T18:17:06Z"
expected_limits = {
"limits": {"rate": [{"uri": "*",
"regex": ".*",
"limit": [{"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"next-available": tdate}]},
{"uri": "*/volumes",
"regex": "^/volumes",
"limit": [{"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"next-available": tdate}]}],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 5}}}
output = self.view_builder.build(self.rate_limits,
self.absolute_limits)
self.assertDictMatch(output, expected_limits)
def test_build_limits_empty_limits(self):
expected_limits = {"limits": {"rate": [],
"absolute": {}}}
abs_limits = {}
rate_limits = []
output = self.view_builder.build(rate_limits, abs_limits)
self.assertDictMatch(output, expected_limits)
class LimitsXMLSerializationTest(test.TestCase):
def test_xml_declaration(self):
serializer = limits.LimitsTemplate()
fixture = {"limits": {
"rate": [],
"absolute": {}}}
output = serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_index(self):
tdate = "2011-12-15T22:42:45Z"
serializer = limits.LimitsTemplate()
fixture = {"limits": {"rate": [{"uri": "*",
"regex": ".*",
"limit": [{"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"next-available": tdate}]},
{"uri": "*/servers",
"regex": "^/servers",
"limit": [{"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"next-available": tdate}]}],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 10240}}}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'limits')
#verify absolute limits
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
self.assertEqual(len(absolutes), 4)
for limit in absolutes:
name = limit.get('name')
value = limit.get('value')
self.assertEqual(value, str(fixture['limits']['absolute'][name]))
#verify rate limits
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
self.assertEqual(len(rates), 2)
for i, rate in enumerate(rates):
for key in ['uri', 'regex']:
self.assertEqual(rate.get(key),
str(fixture['limits']['rate'][i][key]))
rate_limits = rate.xpath('ns:limit', namespaces=NS)
self.assertEqual(len(rate_limits), 1)
for j, limit in enumerate(rate_limits):
for key in ['verb', 'value', 'remaining', 'unit',
'next-available']:
self.assertEqual(
limit.get(key),
str(fixture['limits']['rate'][i]['limit'][j][key]))
def test_index_no_limits(self):
serializer = limits.LimitsTemplate()
fixture = {"limits": {
"rate": [],
"absolute": {}}}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'limits')
#verify absolute limits
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
self.assertEqual(len(absolutes), 0)
#verify rate limits
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
self.assertEqual(len(rates), 0)
| 35.68869
| 79
| 0.526953
|
47912cccae13786cc1704b6e29f58c9528f524d9
| 690
|
py
|
Python
|
minor_lib.py
|
CooolWindS/STC-for-python
|
c8952ec1bdae6408f97b762fdec2c122d6d6f7f0
|
[
"MIT"
] | null | null | null |
minor_lib.py
|
CooolWindS/STC-for-python
|
c8952ec1bdae6408f97b762fdec2c122d6d6f7f0
|
[
"MIT"
] | null | null | null |
minor_lib.py
|
CooolWindS/STC-for-python
|
c8952ec1bdae6408f97b762fdec2c122d6d6f7f0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 29 14:25:10 2021
@author: coolwind
"""
import os
import sys
from PIL import Image
from tqdm import tqdm
import string
import random
import global_var as gvar
def check_dir(path):
if not os.path.isdir(path):
os.makedirs(path)
def read_img(image_path):
image = Image.open(image_path)
(r, g, b) = image.split()
image_split = {'R':r, 'G':g, 'B':b}
return image_split
def channel_choose(mode):
if(mode < 0 or mode > 5):
print('Invalid channel code; please check the channel code parameter.')
sys.exit(0)
elif(mode > 0 and mode < 5):
return [gvar.channel[mode]]
else:
return gvar.channel[1:]
| 18.157895
| 70
| 0.673913
|
c908775da1c540074b261bee09079d9b0f9c980c
| 25,228
|
py
|
Python
|
reader/bionlp.py
|
ninjin/simsem
|
eb3f45b291226cc50d89a6a06d297d3b9cd36caf
|
[
"ISC"
] | 14
|
2015-01-30T01:36:25.000Z
|
2020-03-25T08:53:46.000Z
|
reader/bionlp.py
|
afcarl/simsem
|
eb3f45b291226cc50d89a6a06d297d3b9cd36caf
|
[
"ISC"
] | null | null | null |
reader/bionlp.py
|
afcarl/simsem
|
eb3f45b291226cc50d89a6a06d297d3b9cd36caf
|
[
"ISC"
] | 3
|
2017-01-11T15:42:25.000Z
|
2020-03-25T07:52:42.000Z
|
'''
XXX:
We use fixed random seeds that were generated by:
hex(randint(0, 2**32))
This ensures a stable pseudo-random distribution.
Author: Pontus Stenetorp <pontus stenetorp se>
Version: 2011-03-02
'''
from collections import defaultdict, namedtuple
from itertools import chain, tee
from os import walk
from os.path import dirname, splitext
from os.path import join as join_path
from re import compile as _compile
from random import sample
from sys import path
from codecs import EncodedFile
from aligner import Aligner, MisalignedError
path.append(join_path(dirname(__file__), '../'))
from resources import Annotation, Document, Sentence
### Constants
TXT_EXT = '.txt'
SS_EXT = '.ss'
A1_EXT = '.a1'
A2_EXT = '.a2'
RES_EXTS = set((TXT_EXT, SS_EXT, A1_EXT, A2_EXT))
# We use this mapping for sorting
PRIO_BY_RES_EXT = {
TXT_EXT: 0,
SS_EXT: 10,
A1_EXT: 20,
A2_EXT: 30,
}
TB_SO_REGEX = _compile((r'^T[0-9]+\t(?P<type>[^ ]+) (?P<start>\d+) '
r'(?P<end>\d+)(?:\t(?P<text>.*?))?$'))
# TODO: data dir could be referenced elsewhere
DATA_DIR = join_path(dirname(__file__), '../data/corpora')
BIONLP_2011_DIR = join_path(DATA_DIR, 'bionlp_2011_st')
BIONLP_2009_DIR = join_path(DATA_DIR, 'bionlp_2009_st')
GREC_DIR = join_path(DATA_DIR, 'grec')
CALBC_CII_DIR = join_path(DATA_DIR,
'calbc_ii_st_format_500_sample')
NLPBA_DIR = join_path(DATA_DIR,
'nlpba_slightly_wrong')
NLPBA_DOWN_DIR = join_path(DATA_DIR,
'nlpba_slightly_wrong_downsized')
# 'CALBC.CII.75k.cos98.3.EBI.CL.20101008_st_format')
# Epi and PTM data
BIONLP_2011_EPI_TRAIN_DIR = join_path(BIONLP_2011_DIR,
'BioNLP-ST_2011_Epi_and_PTM_training_data')
BIONLP_2011_EPI_DEV_DIR = join_path(BIONLP_2011_DIR,
'BioNLP-ST_2011_Epi_and_PTM_development_data')
# Genia data
BIONLP_2011_GENIA_TRAIN_DIR = join_path(BIONLP_2011_DIR,
'BioNLP-ST_2011_genia_train_data_rev1')
BIONLP_2011_GENIA_DEV_DIR = join_path(BIONLP_2011_DIR,
'BioNLP-ST_2011_genia_devel_data_rev1')
# Infectious Diseases (ID) data
BIONLP_2011_ID_TRAIN_DIR = join_path(BIONLP_2011_DIR,
'BioNLP-ST_2011_Infectious_Diseases_training_data')
BIONLP_2011_ID_DEV_DIR = join_path(BIONLP_2011_DIR,
'BioNLP-ST_2011_Infectious_Diseases_development_data')
# BioNLP'09 Shared Task data
BIONLP_2009_DEV_DIR = join_path(BIONLP_2009_DIR, 'dev')
BIONLP_2009_TRAIN_DIR = join_path(BIONLP_2009_DIR, 'train')
###
Offset = namedtuple('Offset', ('start', 'end'))
def _extension(path):
return splitext(path)[1]
def _res_ext_cmp(path, other):
return cmp(*(PRIO_BY_RES_EXT[_extension(p)] for p in (path, other)))
# Get aligned: id, txt, ss, a1, a2 function
# XXX: This relies heavily on the filenaming conventions for ST'11
def _get_aligned_resources(dir):
resources_by_id = defaultdict(list)
for root, _, files in walk(dir):
# Get all files with resource extensions
for res_path, res_name in ((join_path(root, filename), filename)
for filename in files if _extension(filename) in RES_EXTS):
resources_by_id[splitext(res_name)[0]].append(res_path)
# Sanity checking
for id, resources in resources_by_id.iteritems():
exts = set((_extension(res_path) for res_path in resources))
# Check for extension collisions
assert len(resources) == len(exts), ('multiple resources '
'with the same extension for id: {}, resources: "{}"'
).format(id, ', '.join(resources))
# Check that all suffixes are present (apart from .a2)
for ext in (e for e in RES_EXTS if e != A2_EXT):
assert ext in exts, ('no resource with extension: {} for id: {}'
).format(ext, id)
# Yield the resources in a determined order
resources.sort(cmp=_res_ext_cmp)
ret = [id]
for res in resources:
ret.append(res)
#XXX: HACK!
if len(ret) == len(RES_EXTS):
ret.append(None)
#print ret
yield ret
NO_STR = True
def _str(s):
if NO_STR:
return unicode(s)
else:
return str(s)
# XXX: Rename this one!
def _get_sentences_and_offsets(txt_handle, ss_handle):
s_starts_and_sentences = []
txt_handle_reads = 0
for s_text in (l.rstrip('\n') for l in ss_handle):
# XXX: We allow multiple spaces to be aligned due to issues with the SS
aligner = Aligner(unicode(s_text, encoding='utf-8'), ignore_mult=set((' ', )))
t_char = None
started_at = txt_handle.tell()
started_at_read = txt_handle_reads
while True:
t_char = unicode(txt_handle.read(1), encoding='utf-8')
txt_handle_reads += 1
if not t_char:
assert False, ('could not align all sentences for: '
'"{}" and "{}" stopped at the sentence: "{}" '
'aligner in state: {}'
).format(txt_handle.name, ss_handle.name,
s_text, aligner.__repr__())
try:
if aligner.align(t_char):
source_text = _str(aligner)
# We are aligned!
s_starts_and_sentences.append((
#txt_handle.tell() - len(source_text),
#started_at,
started_at_read,
Sentence(source_text, [])))
#last_end += aligner.char_cnt
break
except MisalignedError:
started_at = txt_handle.tell()
started_at_read = txt_handle_reads
pass
#s_starts_and_sentences.sort()
return s_starts_and_sentences
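# Each entry pairs the offset at which a sentence starts (counted in characters read from the
# .txt file) with the corresponding Sentence object, in document order.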
from bisect import bisect_left
# Used to get the sentence containing an index
#XXX: Assumes sorted list!
def _find_containing_idx(idx, s_starts_and_sentences):
# Note the tuple dummy to emulate (s_start, s_text)
s_idx = bisect_left(s_starts_and_sentences, (idx, ''))
#if not s_idx:
# raise ValueError
assert s_idx > 0, s_starts_and_sentences
ret_idx = s_idx - 1
assert ret_idx >= 0, ret_idx
return s_starts_and_sentences[ret_idx]
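# e.g. with s_starts_and_sentences == [(0, s0), (120, s1), (240, s2)], idx == 150 falls inside
# the second sentence, so (120, s1) is returned.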
ENCODE_WRAP = True
def _encode_wrap(f):
return EncodedFile(f, 'utf-8')
def _get_documents(dir):
for id, txt_path, ss_path, a1_path, a2_path in _get_aligned_resources(dir):
#print id
# First we align the text and the sentences since we need to map the
# offsets of the stand-off to map to the sentences in the sentence
# split file
#with open(txt_path, 'r') as txt_file, open(ss_path, 'r') as ss_file:
with open(txt_path, 'r') as txt_file:
if ENCODE_WRAP:
txt_file = _encode_wrap(txt_file)
with open(ss_path, 'r') as ss_file:
if ENCODE_WRAP:
ss_file = _encode_wrap(ss_file)
#sentences, s_offset_by_sentence = (
s_starts_and_sentences = (
_get_sentences_and_offsets(txt_file, ss_file))
#XXX: HACK!
if a2_path is None:
a2_path = '/dev/null'
#with open(a1_path, 'r') as a1_file, open(a2_path, 'r') as a2_file:
with open(a1_path, 'r') as a1_file:
if ENCODE_WRAP:
a1_file = _encode_wrap(a1_file)
with open(a2_path, 'r') as a2_file:
if ENCODE_WRAP:
a2_file = _encode_wrap(a2_file)
for line in (l.rstrip('\n') for l in chain(a1_file, a2_file)):
# We ignore everything apart from the text-bound annotations
match = TB_SO_REGEX.match(line)
if match is not None:
g_dict = match.groupdict()
ann_start = int(g_dict['start'])
ann_end = int(g_dict['end'])
# Find the sentence and its index containing the annotation
s_idx, sentence = _find_containing_idx(ann_start,
s_starts_and_sentences)
# XXX: There are cases where an annotation is cut-off
# by a sentence break. If this is the case, merge
# the sentences.
if ann_end > s_idx + len(sentence.text):
next_s_idx, next_sentence = _find_containing_idx(
ann_end, s_starts_and_sentences)
# Merge the next sentence into this one
# XXX: Just assumes a space! May be wrong!
sentence = Sentence(sentence.text + ' ' + next_sentence.text,
sentence.annotations + next_sentence.annotations)
# Remove the old one
s_starts_and_sentences.remove((next_s_idx, next_sentence))
# Create an annotation object but adjust the indices to
# be relative to the sentence and not to the file
new_ann_start = ann_start - s_idx
assert 0 <= new_ann_start < len(sentence.text), '0 <= {} < {} ({}, {}) {} "{}" {}'.format(
new_ann_start, len(sentence.text), s_idx, g_dict['start'], id, g_dict['text'], s_idx)
new_ann_end = ann_end - s_idx
assert 0 < new_ann_end <= len(sentence.text), '0 < {} <= {} ({}, {}) {} {}'.format(
new_ann_end, len(sentence.text), s_idx, g_dict['end'], id, g_dict['text'])
assert new_ann_start < new_ann_end
annotation = Annotation(
ann_start - s_idx, ann_end - s_idx, g_dict['type'])
# If we have a text span in the stand-off we sanity check
# it against what is in the sentence
#XXX: Run this again!
if g_dict['text'] is not None:
g_dict['text'] = unicode(g_dict['text'].strip('\r\n'), encoding='utf-8') #XXX: Regex is not perfect
# it leaves spaces around
target_ann_text = sentence.annotation_text(annotation)
assert target_ann_text == g_dict['text'], (
'text span mismatch in {} '
'target: "{}" != source: "{}" {} "{}" {} {} {}'
).format(id, target_ann_text, g_dict['text'],
annotation, sentence.text, g_dict,
type(target_ann_text), type(g_dict['text']))
sentence.add_annotation(annotation)
#else:
# assert False, line.replace(' ', '\s').replace('\t', '\\t')
yield Document(id, [],
[sentence for _, sentence in s_starts_and_sentences],
txt_path)
from random import seed as random_seed
# TODO: Can be generalised to more of a tee
def _evenly_split_it(it, seed=None):
# Determine the length of the sequence
cnt_it, it = tee(it, 2)
cnt = max(i for i, _ in enumerate(cnt_it, start=1))
if seed is not None:
random_seed(seed)
# Create a set with the elements to use for the first sequence
one_indices = set(sample(xrange(cnt), (cnt + 1) / 2))
# Create and filter two new iterators to give different elements
nfilt_one, nfilt_two = tee(it, 2)
return ((e for i, e in enumerate(nfilt_one) if i in one_indices),
(e for i, e in enumerate(nfilt_two) if i not in one_indices))
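# The two returned generators partition the input into disjoint, (almost) equally sized halves;
# passing a seed makes the pseudo-random split reproducible.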
'''
_a, _b = _evenly_split_it(xrange(10))
assert max((i for i, _ in enumerate(_a))) == max((i for i, _ in enumerate(_b)))
__a = set([e for e in _a])
__b = set([e for e in _b])
print __a, __b
assert __a & __b == set()
assert False
'''
# hex(randint(0, 2**32))
def get_epi_set():
# Split the development set in two
dev_it, test_it = _evenly_split_it(
_get_documents(BIONLP_2011_EPI_DEV_DIR), seed=0x22612817)
return (_get_documents(BIONLP_2011_EPI_TRAIN_DIR), dev_it, test_it)
def get_genia_set():
# Split the development set in two
dev_it, test_it = _evenly_split_it(
_get_documents(BIONLP_2011_GENIA_DEV_DIR), seed=0xd81979e3)
return (_get_documents(BIONLP_2011_GENIA_TRAIN_DIR), dev_it, test_it)
def get_id_set():
# Split the development set in two
dev_it, test_it = _evenly_split_it(
_get_documents(BIONLP_2011_ID_DEV_DIR), seed=0xb2f32d56)
return (_get_documents(BIONLP_2011_ID_TRAIN_DIR), dev_it, test_it)
# XXX: Does not belong here!!!
# XXX: Hack, just works, (tm)
def get_calbc_cii_set():
random_seed('0xc78e13c3')
#XXX: We down-sample to 250, 125, 125
paths = sample([d.path for d in _get_documents(CALBC_CII_DIR)], 500)
train_paths = set(sample(paths, len(paths) / 2))
paths = [p for p in paths if p not in train_paths]
dev_paths = set(sample(paths, len(paths) / 2))
test_paths = set([p for p in paths if p not in dev_paths])
del paths
#print 'train', train_paths
#print 'dev', dev_paths
#print 'test', test_paths
return (
(d for d in _get_documents(CALBC_CII_DIR)
if d.path in train_paths),
(d for d in _get_documents(CALBC_CII_DIR)
if d.path in dev_paths),
(d for d in _get_documents(CALBC_CII_DIR)
if d.path in test_paths),
)
def get_bionlp_2009_set():
# Split the development set in two
dev_it, test_it = _evenly_split_it(
_get_documents(BIONLP_2009_DEV_DIR), seed=0xb2f32d56)
return (_get_documents(BIONLP_2009_TRAIN_DIR), dev_it, test_it)
### GREC Constants
# According to GREC annotation guidelines
GREC_SUPER_CONCEPTS = set((
'EXPERIMENTAL',
'LIVING_SYSTEM',
'NUCLEIC_ACIDS',
'PROCESSES',
'PROTEINS',
))
GREC_ONTOLOGY = {
# PROTEINS Sub-ontology
'PROTEIN_STRUCTURE': 'PROTEINS',
'Polypeptide': 'PROTEIN_STRUCTURE',
'Peptide': 'PROTEIN_STRUCTURE',
'Protein_Complex': 'PROTEIN_STRUCTURE',
'Protein': 'PROTEIN_STRUCTURE',
'Amino_Acids': 'PROTEIN_STRUCTURE',
'Conformation': 'Polypeptide',
'Sub_Unit': 'Polypeptide',
'Domain': 'Conformation',
'Motif': 'Conformation',
'PROTEIN_FUNCTION': 'PROTEINS',
'Transcription_Factor': 'PROTEIN_FUNCTION',
'Enzyme': 'PROTEIN_FUNCTION',
'Regulator': 'PROTEIN_FUNCTION',
'Sigma_Factor': 'Transcription_Factor',
'Repressor': 'Transcription_Factor',
'Rho_Factor': 'Transcription_Factor',
'Activator': 'Transcription_Factor',
'RNA_Polymerase': 'Enzyme',
'DNA_Polymerase': 'Enzyme',
'Restriction_Enzyme': 'Enzyme',
# NUCLEIC_ACIDS Sub-ontology
'SEQUENCE&STRUCTURE': 'NUCLEIC_ACIDS',
'Chromosome': 'SEQUENCE&STRUCTURE',
'Plasmid': 'SEQUENCE&STRUCTURE',
'Viral_Vector': 'SEQUENCE&STRUCTURE',
'RNA': 'SEQUENCE&STRUCTURE',
'DNA': 'SEQUENCE&STRUCTURE',
'Locus': 'Chromosome',
'Gene': 'Chromosome',
'Operon': 'Chromosome',
'Mutant_Gene': 'Gene',
'ORF': 'Gene',
'Allele': 'Gene',
'REGULATION&EXPRESSION': 'NUCLEIC_ACIDS',
'Transcription_Binding_Site': 'REGULATION&EXPRESSION',
'Termination_Site': 'REGULATION&EXPRESSION',
'Phenotype': 'REGULATION&EXPRESSION',
'Ribosome': 'REGULATION&EXPRESSION',
'mRNA': 'REGULATION&EXPRESSION',
'Promoter': 'Transcription_Binding_Site',
'Operator': 'Transcription_Binding_Site',
'Enhancer': 'Transcription_Binding_Site',
# LIVING_SYSTEM Sub-ontology
'Prokaryotes': 'LIVING_SYSTEM',
'Virus': 'LIVING_SYSTEM',
'Eukaryotes': 'LIVING_SYSTEM',
'Bacteria': 'Prokaryotes',
'Non-Bacteria': 'Prokaryotes',
#XXX: This one is Wild-Type_Bacteria
'Wild_Type_Bacteria': 'Bacteria',
'Mutant_Bacteria': 'Bacteria',
'Tissue': 'Eukaryotes',
'Cells': 'Eukaryotes',
'Organelles': 'Eukaryotes',
'Organism': 'Eukaryotes',
'Wild_Type_Organism': 'Organism',
'Mutant_Organism': 'Organism',
# PROCESSES Sub-ontology
'Gene_Expression': 'PROCESSES',
'Recombination': 'PROCESSES',
'Mutation': 'PROCESSES',
'Transcription': 'PROCESSES',
'Replication': 'PROCESSES',
'Methylation': 'PROCESSES',
'Regulation': 'PROCESSES',
'Gene_Activation': 'Gene_Expression',
'Gene_Repression': 'Gene_Expression',
'Insertion': 'Mutation',
'Deletion': 'Mutation',
'Null_Mutation': 'Mutation',
'Point_Mutation': 'Mutation',
# EXPERIMENTAL Sub-ontology
'Reagents': 'EXPERIMENTAL',
'Experimental_Technique': 'EXPERIMENTAL',
'Experimental_Equipment': 'EXPERIMENTAL',
'Inorganic_Compounds': 'Reagents',
'Organic_Compounds': 'Reagents',
'Biological_Fluids': 'Reagents',
'Other_Compounds': 'Reagents',
'Laboratory_Technique': 'Experimental_Technique',
'Computational_Analysis': 'Experimental_Technique',
}
# XXX: This will of course crash if you give it an unknown term
def _collapse_grec_term(term):
# XXX: Check if it is an event trigger
if term == 'GRE':
return term
while True:
parent = GREC_ONTOLOGY[term]
if parent in GREC_SUPER_CONCEPTS:
return parent
else:
term = parent
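# Illustrative sketch (not part of the original corpus code): the collapse walks
# the ontology upwards until it hits one of the super concepts, e.g.
# 'Promoter' -> 'Transcription_Binding_Site' -> 'REGULATION&EXPRESSION'
# -> 'NUCLEIC_ACIDS'. Kept commented out so importing the module is unchanged.
#assert _collapse_grec_term('Promoter') == 'NUCLEIC_ACIDS'
#assert _collapse_grec_term('Enzyme') == 'PROTEINS'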
# XXX: Strictly generic terms annotated in GREC
GREC_GENERIC = set(('SPAN', ))
def _filter_generic_grec_terms(doc_it):
for doc in doc_it:
for sent in doc:
sent.annotations = [ann for ann in sent
if ann.type not in GREC_GENERIC]
yield doc
def _alter_terms(doc_it, alt_f):
for doc in doc_it:
for sent in doc:
for ann in sent:
ann.type = alt_f(ann.type)
yield doc
###
# A bit ugly but works
def get_grec_set():
random_seed(0x763f059c)
paths = [d.path for d in _get_documents(GREC_DIR)]
train_paths = set(sample(paths, len(paths) / 2))
paths = [p for p in paths if p not in train_paths]
dev_paths = set(sample(paths, len(paths) / 2))
test_paths = set([p for p in paths if p not in dev_paths])
del paths
#print 'train', train_paths
#print 'dev', dev_paths
#print 'test', test_paths
return [_filter_generic_grec_terms(it) for it in (
(d for d in _get_documents(GREC_DIR)
if d.path in train_paths),
(d for d in _get_documents(GREC_DIR)
if d.path in dev_paths),
(d for d in _get_documents(GREC_DIR)
if d.path in test_paths),
)]
def get_super_grec_set():
return [_alter_terms(it, _collapse_grec_term) for it in get_grec_set()]
# A bit ugly but works
def get_nlpba_set():
random_seed(0x25e453f9)
paths = [d.path for d in _get_documents(NLPBA_DIR)]
train_paths = set(sample(paths, len(paths) / 2))
paths = [p for p in paths if p not in train_paths]
dev_paths = set(sample(paths, len(paths) / 2))
test_paths = set([p for p in paths if p not in dev_paths])
del paths
#print 'train', train_paths
#print 'dev', dev_paths
#print 'test', test_paths
return (
(d for d in _get_documents(NLPBA_DIR)
if d.path in train_paths),
(d for d in _get_documents(NLPBA_DIR)
if d.path in dev_paths),
(d for d in _get_documents(NLPBA_DIR)
if d.path in test_paths),
)
def get_nlpba_down_set():
random_seed(0x8a3403ed)
paths = [d.path for d in _get_documents(NLPBA_DOWN_DIR)]
train_paths = set(sample(paths, len(paths) / 2))
paths = [p for p in paths if p not in train_paths]
dev_paths = set(sample(paths, len(paths) / 2))
test_paths = set([p for p in paths if p not in dev_paths])
del paths
return (
(d for d in _get_documents(NLPBA_DOWN_DIR)
if d.path in train_paths),
(d for d in _get_documents(NLPBA_DOWN_DIR)
if d.path in dev_paths),
(d for d in _get_documents(NLPBA_DOWN_DIR)
if d.path in test_paths),
)
# XXX: We have essentially used Genia to create dictionaries, remove them
SETS_BY_ID = {
'BioNLP-ST-2011-Epi_and_PTM': get_epi_set,
'BioNLP-ST-2011-genia': get_genia_set,
'BioNLP-ST-2011-Infectious_Diseases': get_id_set,
##'BioNLP-ST-2009': get_bionlp_2009_set,
'GREC': get_grec_set,
'SUPER_GREC': get_super_grec_set,
'CALBC_CII': get_calbc_cii_set,
'NLPBA': get_nlpba_set,
'NLPBA_DOWN': get_nlpba_down_set,
}
def _document_stats(docs):
doc_cnt = 0
snt_cnt = 0
ann_cnt = 0
from collections import defaultdict
ann_cnt_by_type = defaultdict(int)
for doc in docs:
doc_cnt += 1
for snt in doc:
snt_cnt += 1
for ann in snt:
ann_cnt += 1
ann_cnt_by_type[ann.type] += 1
print 'Documents:', doc_cnt
print 'Sentences:', snt_cnt
ann_sum = sum(ann_cnt_by_type.itervalues())
print 'Annotations:', ann_sum
if ann_sum:
print 'Annotations by type:'
for type, cnt in ((t, ann_cnt_by_type[t])
for t in sorted([k for k in ann_cnt_by_type])):
print '\t' + type + ': ' + _str(cnt), '({0:.2%})'.format(
cnt / float(ann_cnt))
PLOT_AND_TABLES = True
FULL_SET = False
#PLOT_CUT_OFF = 10
def grey_scale(steps):
limit = 40
ret = [str((100 - shade) / 100.0)
for shade in range(0, 100 - limit + 1, (100 - limit) / 5)]
return ret
def _plot_and_tables(name, docs):
try:
import matplotlib
import matplotlib.pyplot as plt
PLOT = True
except ImportError:
PLOT = False
from collections import defaultdict
ann_cnt = 0
ann_cnt_by_type = defaultdict(int)
for doc in docs:
for snt in doc:
for ann in snt:
ann_cnt += 1
ann_cnt_by_type[ann.type] += 1
types = [k for k in ann_cnt_by_type]
types.sort()
fracs = [ann_cnt_by_type[t] / float(ann_cnt) for t in types]
occs = [ann_cnt_by_type[t] for t in types]
labels = [t.replace('_', ' ') for t in types]
if PLOT:
#if len(ann_cnt_by_type) <= PLOT_CUT_OFF:
c_scale = grey_scale(len(labels))
plt.pie(fracs, labels=labels,
autopct='%1.1f%%', shadow=False, colors=c_scale)
#plt.title('Annotated Word Types')
for fmt in ('svg', 'png'):
            plt.savefig((name + '_pie' + '.' + fmt).lower(), format=fmt)
plt.cla()
plt.clf()
#else:
# TODO: Generate which order it is, largest, etc.
    with open((name + '_table.tex').lower(), 'w') as table_file:
table_file.write('\n'.join((
'\\begin{tabular}{lrr}',
'\\toprule',
'Type & Ratio & Annotations \\\\',
'\\midrule',
)))
table_file.write('\n')
for l, r, o in zip(labels, fracs, occs):
table_file.write('{0} & {1:.1f}\\% & {2} \\\\\n'.format(l, r * 100, o))
table_file.write('\n'.join((
'\\midrule',
'Total: & & {0} \\\\'.format(sum(ann_cnt_by_type.itervalues())),
'\\bottomrule',
'\\end{tabular}',
)))
PRINT_STATS = False
if __name__ == '__main__':
#print 'GREC'
#_document_stats(chain(*(SETS_BY_ID['GREC']())))
#print
#print 'SUPER_GREC'
#_document_stats(chain(*(SETS_BY_ID['SUPER_GREC']())))
#exit(0)
# TODO: Check if dirs exist and generates files!
# We just iterate over all of it to detect errors
for i, id, get_set in ((j, t[0], t[1])
for j, t in enumerate(SETS_BY_ID.iteritems())):
if i != 0:
print
print id
if PRINT_STATS:
train, dev, test = get_set()
print 'Train Set'
_document_stats(train)
print
print 'Development Set'
_document_stats(dev)
print
print 'Test Set'
_document_stats(test)
if FULL_SET:
print 'Full Set'
_document_stats(chain(*get_set()))
if PLOT_AND_TABLES:
print 'Plotting...',
_plot_and_tables(id, chain(*get_set()))
print 'Done!'
| 36.04
| 127
| 0.581457
|
a36c01d096dcdf59a03d61c1dce0ac045f20f33e
| 3,312
|
py
|
Python
|
graphql_in_python/settings.py
|
anirvansen/graphql_in_python
|
f7ec3709123ce481719147cafac70070c0eb0628
|
[
"MIT"
] | null | null | null |
graphql_in_python/settings.py
|
anirvansen/graphql_in_python
|
f7ec3709123ce481719147cafac70070c0eb0628
|
[
"MIT"
] | null | null | null |
graphql_in_python/settings.py
|
anirvansen/graphql_in_python
|
f7ec3709123ce481719147cafac70070c0eb0628
|
[
"MIT"
] | null | null | null |
"""
Django settings for graphql_in_python project.
Generated by 'django-admin startproject' using Django 3.2.9.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-)a_2^z*$z2bp5k0)nyz$k!$d$nhfig)j_)kla-xt+r)e8uxy7k'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'graphene_django',
'user_post'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'graphql_in_python.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'graphql_in_python.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 25.674419
| 91
| 0.702597
|
a93346482c7539963fd90d831bc86a86cd47fb31
| 799
|
py
|
Python
|
tests/storitch/test_utils.py
|
thomaserlang/storitch
|
dbcf97af547d9cb1ae5c3994654e8db03e43a253
|
[
"MIT"
] | null | null | null |
tests/storitch/test_utils.py
|
thomaserlang/storitch
|
dbcf97af547d9cb1ae5c3994654e8db03e43a253
|
[
"MIT"
] | 1
|
2022-03-03T00:35:08.000Z
|
2022-03-03T00:35:08.000Z
|
tests/storitch/test_utils.py
|
thomaserlang/storitch
|
dbcf97af547d9cb1ae5c3994654e8db03e43a253
|
[
"MIT"
] | null | null | null |
import unittest
from storitch.utils import path_from_hash
class Test(unittest.TestCase):
def test_path_from_hash(self):
hash_ = '1b4f0e9851971998e732078544c96b36c3d01cedf7caa332359d6f1d83567014'
path = path_from_hash(
hash_,
levels=1,
length=2,
)
self.assertEqual(
'1b',
path,
)
path = path_from_hash(
hash_,
levels=2,
length=2,
)
self.assertEqual(
'1b/4f',
path,
)
path = path_from_hash(
hash_,
levels=3,
length=3,
)
self.assertEqual(
'1b4/f0e/985',
path,
)
if __name__ == '__main__':
unittest.main()
| 19.975
| 82
| 0.480601
|
5b593ab728def49147698fc9121ccac5a46e286e
| 218
|
py
|
Python
|
run_tests.py
|
rizar/baby-ai-game
|
7c5f00ed1b9e756068e0b5d3b76c8d03036c5990
|
[
"BSD-3-Clause"
] | 5
|
2018-03-03T03:14:02.000Z
|
2019-12-27T21:40:14.000Z
|
run_tests.py
|
rizar/baby-ai-game
|
7c5f00ed1b9e756068e0b5d3b76c8d03036c5990
|
[
"BSD-3-Clause"
] | null | null | null |
run_tests.py
|
rizar/baby-ai-game
|
7c5f00ed1b9e756068e0b5d3b76c8d03036c5990
|
[
"BSD-3-Clause"
] | 1
|
2019-04-15T14:30:14.000Z
|
2019-04-15T14:30:14.000Z
|
#!/usr/bin/env python3
import babyai
from babyai import levels
from babyai import agents
# NOTE: please make sure that tests are always deterministic
print('Testing levels, mission generation')
levels.levels.test()
| 19.818182
| 60
| 0.788991
|
ac314726f848551e44d05dfd060683bd6c76b47d
| 5,942
|
py
|
Python
|
dnacentersdk/models/validators/v2_1_2/jsd_50b589fd4c7a930a.py
|
nonstdout/dnacentersdk
|
dbbbc4baa5300aa9e5c9193f2ea71438018095f5
|
[
"MIT"
] | null | null | null |
dnacentersdk/models/validators/v2_1_2/jsd_50b589fd4c7a930a.py
|
nonstdout/dnacentersdk
|
dbbbc4baa5300aa9e5c9193f2ea71438018095f5
|
[
"MIT"
] | null | null | null |
dnacentersdk/models/validators/v2_1_2/jsd_50b589fd4c7a930a.py
|
nonstdout/dnacentersdk
|
dbbbc4baa5300aa9e5c9193f2ea71438018095f5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""DNA Center Create Site data model.
Copyright (c) 2019-2020 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import fastjsonschema
import json
from dnacentersdk.exceptions import MalformedRequest
from builtins import *
class JSONSchemaValidator50B589Fd4C7A930A(object):
"""Create Site request schema definition."""
def __init__(self):
super(JSONSchemaValidator50B589Fd4C7A930A, self).__init__()
self._validator = fastjsonschema.compile(json.loads(
'''{
"properties": {
"site": {
"description":
"Site",
"properties": {
"area": {
"description":
"Area",
"properties": {
"name": {
"description":
"Name",
"type": [
"string",
"null"
]
},
"parentName": {
"description":
"Parent Name",
"type": [
"string",
"null"
]
}
},
"type": [
"object",
"null"
]
},
"building": {
"description":
"Building",
"properties": {
"address": {
"description":
"Address",
"type": [
"string",
"null"
]
},
"latitude": {
"type": [
"number",
"null"
]
},
"longitude": {
"type": [
"number",
"null"
]
},
"name": {
"description":
"Name",
"type": [
"string",
"null"
]
},
"parentName": {
"description":
"Parent Name",
"type": [
"string",
"null"
]
}
},
"type": [
"object",
"null"
]
},
"floor": {
"description":
"Floor",
"properties": {
"height": {
"type": [
"number",
"null"
]
},
"length": {
"type": [
"number",
"null"
]
},
"name": {
"description":
"Name",
"type": [
"string",
"null"
]
},
"parentName": {
"description":
"Parent Name",
"type": [
"string",
"null"
]
},
"rfModel": {
"description":
"Rf Model",
"enum": [
"Cubes And Walled Offices",
"Drywall Office Only",
"Indoor High Ceiling",
"Outdoor Open Space",
null
],
"type": [
"string",
"null"
]
},
"width": {
"type": [
"number",
"null"
]
}
},
"type": [
"object",
"null"
]
}
},
"type": [
"object"
]
},
"type": {
"description":
"Type",
"enum": [
"area",
"building",
"floor",
null
],
"type": [
"string"
]
}
},
"required": [
"type",
"site"
],
"type": "object"
}'''.replace("\n" + ' ' * 16, '')
))
def validate(self, request):
try:
self._validator(request)
except fastjsonschema.exceptions.JsonSchemaException as e:
raise MalformedRequest(
'{} is invalid. Reason: {}'.format(request, e.message)
)
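# Hedged usage sketch (values are hypothetical): a minimal request that
# satisfies the schema above -- both "type" and "site" are required keys.
#JSONSchemaValidator50B589Fd4C7A930A().validate({
#    'type': 'area',
#    'site': {'area': {'name': 'Branch Office', 'parentName': 'Global'}},
#})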
| 27.509259
| 78
| 0.368899
|
5b7900eeacb12380259c549c07bc7d5058422d2d
| 194
|
py
|
Python
|
bilgisayfam/entry/helpers.py
|
tayfun/bilgisayfam
|
f2f9942be53abefc1caf31509553c46957195535
|
[
"MIT"
] | 1
|
2016-10-03T19:30:04.000Z
|
2016-10-03T19:30:04.000Z
|
bilgisayfam/entry/helpers.py
|
tayfun/bilgisayfam
|
f2f9942be53abefc1caf31509553c46957195535
|
[
"MIT"
] | null | null | null |
bilgisayfam/entry/helpers.py
|
tayfun/bilgisayfam
|
f2f9942be53abefc1caf31509553c46957195535
|
[
"MIT"
] | null | null | null |
def get_json_cache_key(keyword):
"""
Returns a cache key for the given word and dictionary.
"""
version = "1.0"
return ":".join(["entry:utils", version, "meaning", keyword])
| 27.714286
| 65
| 0.628866
|
411348ff71dec050a95511732356041f27cd8a39
| 1,056
|
py
|
Python
|
mlrun/serving/__init__.py
|
shul/mlrun
|
d99e08ba8dce9833fca3ab00cdd246d873cf16b6
|
[
"Apache-2.0"
] | null | null | null |
mlrun/serving/__init__.py
|
shul/mlrun
|
d99e08ba8dce9833fca3ab00cdd246d873cf16b6
|
[
"Apache-2.0"
] | null | null | null |
mlrun/serving/__init__.py
|
shul/mlrun
|
d99e08ba8dce9833fca3ab00cdd246d873cf16b6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"V2ModelServer",
"VotingEnsemble",
"GraphServer",
"create_graph_server",
"GraphContext",
"TaskState",
"RouterState",
"QueueState",
]
from .v1_serving import MLModelServer, new_v1_model_server # noqa
from .v2_serving import V2ModelServer # noqa
from .routers import VotingEnsemble, ModelRouter # noqa
from .states import TaskState, RouterState, QueueState # noqa
from .server import GraphServer, create_graph_server, GraphContext # noqa
| 34.064516
| 74
| 0.750947
|
53a46af63dfc4bf36929671b3db124e81a3992a7
| 5,219
|
py
|
Python
|
Tests/test_KeyWList.py
|
benjaminr/biopython
|
ad4fe9b0026fd9d4aa417168f4b620f6a681a0ff
|
[
"BSD-3-Clause"
] | 1
|
2020-02-13T14:32:44.000Z
|
2020-02-13T14:32:44.000Z
|
Tests/test_KeyWList.py
|
benjaminr/biopython
|
ad4fe9b0026fd9d4aa417168f4b620f6a681a0ff
|
[
"BSD-3-Clause"
] | null | null | null |
Tests/test_KeyWList.py
|
benjaminr/biopython
|
ad4fe9b0026fd9d4aa417168f4b620f6a681a0ff
|
[
"BSD-3-Clause"
] | 1
|
2019-03-14T18:59:30.000Z
|
2019-03-14T18:59:30.000Z
|
# Copyright 1999 by Jeffrey Chang. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Tests for KeyWList."""
import unittest
import os.path
from Bio.SwissProt import KeyWList
class KeyWListTest(unittest.TestCase):
"""Tests for KeyWList."""
def test_parse(self):
"""Test parsing keywlist.txt works."""
filename = os.path.join("SwissProt", "keywlist.txt")
handle = open(filename)
records = KeyWList.parse(handle)
# Testing the first record
record = next(records)
self.assertEqual(record["ID"], "2Fe-2S.")
self.assertEqual(record["AC"], "KW-0001")
self.assertEqual(record["DE"], "Protein which contains at least one 2Fe-2S iron-sulfur cluster: 2 iron atoms complexed to 2 inorganic sulfides and 4 sulfur atoms of cysteines from the protein.")
self.assertEqual(record["SY"], "Fe2S2; [2Fe-2S] cluster; [Fe2S2] cluster; Fe2/S2 (inorganic) cluster; Di-mu-sulfido-diiron; 2 iron, 2 sulfur cluster binding.")
self.assertEqual(len(record["GO"]), 1)
self.assertEqual(record["GO"], ["GO:0051537; 2 iron, 2 sulfur cluster binding"])
self.assertEqual(len(record["HI"]), 2)
self.assertEqual(record["HI"][0], "Ligand: Iron; Iron-sulfur; 2Fe-2S.")
self.assertEqual(record["HI"][1], "Ligand: Metal-binding; 2Fe-2S.")
self.assertEqual(record["CA"], "Ligand.")
# Testing the second record
record = next(records)
self.assertEqual(record["IC"], "Molecular function.")
self.assertEqual(record["AC"], "KW-9992")
self.assertEqual(record["DE"], "Keywords assigned to proteins due to their particular molecular function.")
# Testing the third record
record = next(records)
self.assertEqual(record["ID"], "Zymogen.")
self.assertEqual(record["AC"], "KW-0865")
self.assertEqual(record["DE"], "The enzymatically inactive precursor of mostly proteolytic enzymes.")
self.assertEqual(record["SY"], "Proenzyme.")
self.assertEqual(len(record["HI"]), 1)
self.assertEqual(record["HI"][0], "PTM: Zymogen.")
self.assertEqual(record["CA"], "PTM.")
handle.close()
def test_parse2(self):
"""Parsing keywlist2.txt (without header and footer)."""
filename = os.path.join("SwissProt", "keywlist2.txt")
handle = open(filename)
records = KeyWList.parse(handle)
# Testing the first record
record = next(records)
self.assertEqual(record["ID"], "2Fe-2S.")
self.assertEqual(record["AC"], "KW-0001")
self.assertEqual(record["DE"], "Protein which contains at least one 2Fe-2S iron-sulfur cluster: 2 iron atoms complexed to 2 inorganic sulfides and 4 sulfur atoms of cysteines from the protein.")
self.assertEqual(record["SY"], "Fe2S2; [2Fe-2S] cluster; [Fe2S2] cluster; Fe2/S2 (inorganic) cluster; Di-mu-sulfido-diiron; 2 iron, 2 sulfur cluster binding.")
self.assertEqual(len(record["GO"]), 1)
self.assertEqual(record["GO"], ["GO:0051537; 2 iron, 2 sulfur cluster binding"])
self.assertEqual(len(record["HI"]), 2)
self.assertEqual(record["HI"][0], "Ligand: Iron; Iron-sulfur; 2Fe-2S.")
self.assertEqual(record["HI"][1], "Ligand: Metal-binding; 2Fe-2S.")
self.assertEqual(record["CA"], "Ligand.")
# Testing the second record
record = next(records)
self.assertEqual(record["ID"], "3D-structure.")
self.assertEqual(record["AC"], "KW-0002")
self.assertEqual(record["DE"], "Protein, or part of a protein, whose three-dimensional structure has been resolved experimentally (for example by X-ray crystallography or NMR spectroscopy) and whose coordinates are available in the PDB database. Can also be used for theoretical models.")
self.assertEqual(len(record["HI"]), 1)
self.assertEqual(record["HI"][0], "Technical term: 3D-structure.")
self.assertEqual(record["CA"], "Technical term.")
# Testing the third record
record = next(records)
self.assertEqual(record["ID"], "3Fe-4S.")
self.assertEqual(record["AC"], "KW-0003")
self.assertEqual(record["DE"], "Protein which contains at least one 3Fe-4S iron-sulfur cluster: 3 iron atoms complexed to 4 inorganic sulfides and 3 sulfur atoms of cysteines from the protein. In a number of iron-sulfur proteins, the 4Fe-4S cluster can be reversibly converted by oxidation and loss of one iron ion to a 3Fe-4S cluster.")
self.assertEqual(record["SY"], "")
self.assertEqual(len(record["GO"]), 1)
self.assertEqual(record["GO"], ['GO:0051538; 3 iron, 4 sulfur cluster binding'])
self.assertEqual(len(record["HI"]), 2)
self.assertEqual(record["HI"][0], "Ligand: Iron; Iron-sulfur; 3Fe-4S.")
self.assertEqual(record["HI"][1], "Ligand: Metal-binding; 3Fe-4S.")
self.assertEqual(record["CA"], "Ligand.")
handle.close()
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner)
| 52.717172
| 345
| 0.65798
|
b234a4f7a926fedcdf5e694535fae83787daf9b7
| 2,951
|
py
|
Python
|
xmonitor/tests/unit/test_misc.py
|
froyobin/xmonitor
|
092dcaa01f834353ffd8dd3c40edf9e97543bfe8
|
[
"Apache-2.0"
] | null | null | null |
xmonitor/tests/unit/test_misc.py
|
froyobin/xmonitor
|
092dcaa01f834353ffd8dd3c40edf9e97543bfe8
|
[
"Apache-2.0"
] | null | null | null |
xmonitor/tests/unit/test_misc.py
|
froyobin/xmonitor
|
092dcaa01f834353ffd8dd3c40edf9e97543bfe8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import six
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from xmonitor.common import crypt
from xmonitor.common import utils
from xmonitor.tests import utils as test_utils
class UtilsTestCase(test_utils.BaseTestCase):
def test_encryption(self):
# Check that original plaintext and unencrypted ciphertext match
# Check keys of the three allowed lengths
key_list = ["1234567890abcdef",
"12345678901234567890abcd",
"1234567890abcdef1234567890ABCDEF"]
plaintext_list = ['']
blocksize = 64
for i in range(3 * blocksize):
text = os.urandom(i)
if six.PY3:
text = text.decode('latin1')
plaintext_list.append(text)
for key in key_list:
for plaintext in plaintext_list:
ciphertext = crypt.urlsafe_encrypt(key, plaintext, blocksize)
self.assertIsInstance(ciphertext, str)
self.assertNotEqual(ciphertext, plaintext)
text = crypt.urlsafe_decrypt(key, ciphertext)
self.assertIsInstance(text, str)
self.assertEqual(plaintext, text)
def test_empty_metadata_headers(self):
"""Ensure unset metadata is not encoded in HTTP headers"""
metadata = {
'foo': 'bar',
'snafu': None,
'bells': 'whistles',
'unset': None,
'empty': '',
'properties': {
'distro': '',
'arch': None,
'user': 'nobody',
},
}
headers = utils.image_meta_to_http_headers(metadata)
self.assertNotIn('x-image-meta-snafu', headers)
        self.assertNotIn('x-image-meta-unset', headers)
self.assertNotIn('x-image-meta-snafu', headers)
self.assertNotIn('x-image-meta-property-arch', headers)
self.assertEqual('bar', headers.get('x-image-meta-foo'))
self.assertEqual('whistles', headers.get('x-image-meta-bells'))
self.assertEqual('', headers.get('x-image-meta-empty'))
self.assertEqual('', headers.get('x-image-meta-property-distro'))
self.assertEqual('nobody', headers.get('x-image-meta-property-user'))
| 36.8875
| 78
| 0.627923
|
dffd61a91c20e5fe02c8cf43f6590e29107c62a5
| 1,201
|
py
|
Python
|
8_game_controller/utils/tracer.py
|
ProGabe/teals
|
7ebf0b6e6f81d8a4c44baa7b5d3a9d95267ec1e3
|
[
"MIT"
] | null | null | null |
8_game_controller/utils/tracer.py
|
ProGabe/teals
|
7ebf0b6e6f81d8a4c44baa7b5d3a9d95267ec1e3
|
[
"MIT"
] | 9
|
2019-11-21T13:12:47.000Z
|
2021-02-02T14:52:52.000Z
|
8_game_controller/utils/tracer.py
|
ProGabe/teals
|
7ebf0b6e6f81d8a4c44baa7b5d3a9d95267ec1e3
|
[
"MIT"
] | 2
|
2021-01-25T03:38:30.000Z
|
2021-03-07T23:54:53.000Z
|
from utils.logger import Logger
from datetime import datetime
class TraceDecorator:
def __init__(self, function):
self.function = function
def __call__(self, *args, **kwargs):
        # Function return value
return_value = None
# Start time
function_start = datetime.now()
# Base message
spacer = '\t' * 8
out_message_base = "Module: {} - Function: {} ".format(
self.function.__module__,
self.function.__name__)
out_message_base += "\n{}ARGUMENTS: {}".format(spacer, args)
try:
            # Execute the function; if it raises, log the exception
return_value = self.function(*args, **kwargs)
except Exception as ex:
out_message_base += "\n{}EXCEPTION: {}".format(spacer, str(ex))
# Add function return
out_message_base += "\n{}RETURNS: {}".format(spacer, return_value)
        # Record the elapsed wall-clock time
span = datetime.now() - function_start
out_message_base += "\n{}EXECUTION: {}".format(spacer, str(span))
# Finally log it and return the function return value
Logger.add_log(out_message_base)
return return_value
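# Hedged usage sketch (the decorated function below is hypothetical): applying
# the class as a decorator logs every call with its arguments, return value,
# any exception and the elapsed wall-clock time via Logger.add_log.
#
# @TraceDecorator
# def add(a, b):
#     return a + b
#
# add(1, 2)  # logged as "... ARGUMENTS: (1, 2) ... RETURNS: 3 ... EXECUTION: ..."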
| 28.595238
| 75
| 0.603664
|
07b74df737ff0dd7ec147338c1b532272be646df
| 134
|
py
|
Python
|
multivitamin/data/response/config.py
|
keishinkickback/multivitamin
|
edc49267fc5a25182da0e7c7c8bba398225437e5
|
[
"Apache-2.0"
] | 8
|
2019-05-08T20:27:41.000Z
|
2021-04-19T15:17:22.000Z
|
multivitamin/data/response/config.py
|
keishinkickback/multivitamin
|
edc49267fc5a25182da0e7c7c8bba398225437e5
|
[
"Apache-2.0"
] | 9
|
2019-05-17T19:16:50.000Z
|
2022-03-11T23:46:55.000Z
|
multivitamin/data/response/config.py
|
keishinkickback/multivitamin
|
edc49267fc5a25182da0e7c7c8bba398225437e5
|
[
"Apache-2.0"
] | 4
|
2019-05-07T18:00:51.000Z
|
2019-06-22T02:35:51.000Z
|
""" Configuration parameters
"""
SCHEMA_FILE = "schema.avsc"
POINT_EPS = 0.0001
TIME_EPS = 0.016667 # 1/60
SIGFIG = 4
SCHEMA_ID = 68
| 16.75
| 28
| 0.701493
|
4aa16b3507483f6066cf3b19906d3e9d3998930f
| 93
|
py
|
Python
|
defines/urls/production.py
|
jvsouza/ccw
|
6ad0d3459671893340e5a5cd479bfbfa4219f6d5
|
[
"MIT"
] | null | null | null |
defines/urls/production.py
|
jvsouza/ccw
|
6ad0d3459671893340e5a5cd479bfbfa4219f6d5
|
[
"MIT"
] | null | null | null |
defines/urls/production.py
|
jvsouza/ccw
|
6ad0d3459671893340e5a5cd479bfbfa4219f6d5
|
[
"MIT"
] | null | null | null |
from .common import *
urlpatterns = [
path("@dm1n/", admin.site.urls),
] + urlpatterns
| 13.285714
| 36
| 0.634409
|
e6db183eb28c72a4a8429f041a7e02f6603afb85
| 1,835
|
py
|
Python
|
Tests/Benchmark/test.py
|
MartinPdeS/SuPyMode
|
8a0a77ccbdae781d878f3d92a2b476774d666fa5
|
[
"MIT"
] | null | null | null |
Tests/Benchmark/test.py
|
MartinPdeS/SuPyMode
|
8a0a77ccbdae781d878f3d92a2b476774d666fa5
|
[
"MIT"
] | null | null | null |
Tests/Benchmark/test.py
|
MartinPdeS/SuPyMode
|
8a0a77ccbdae781d878f3d92a2b476774d666fa5
|
[
"MIT"
] | 1
|
2021-07-16T14:20:01.000Z
|
2021-07-16T14:20:01.000Z
|
from SuPyMode.Geometry import Geometry, Circle, Fused3, Gradient
from SuPyMode.Solver import SuPySolver
from SuPyMode.sellmeier import Fused_silica
from SuPyMode.fibers import *
"""
FIGURE 2.5 SBB_____________________________________________________
"""
A = Fiber_DCF1300S_20(1.55)
B = Fiber_DCF1300S_33(1.55)
C = Fiber_2028M12(1.55)
Clad = Fused3( Radius = 62.5, Fusion = 0.9, Index = Fused_silica(1.55))
Gradient0 = Gradient(Center = Clad.C[0], Nin=A.nClad, Nout = Fused_silica(1.55), Rout=A.rClad*2)
Clad0 = Circle( Position = Clad.C[0], Radi = A.rClad*2, Gradient=Gradient0 )
Core0 = Circle( Position = Clad.C[0], Radi = A.rCore, Index = A.nCore )
Clad1 = Circle( Position = Clad.C[1], Radi = B.rClad, Index = B.nClad )
Core1 = Circle( Position = Clad.C[1], Radi = B.rCore, Index = B.nCore )
Clad2 = Circle( Position = Clad.C[2], Radi = C.rClad, Index = C.nClad )
Core2 = Circle( Position = Clad.C[2], Radi = C.rCore, Index = C.nCore )
SMF28 = Geometry(Objects = [Clad, Clad0, Clad1, Clad2, Core0, Core1, Core2],
Xbound = [-110, 110],
Ybound = [-110, 110],
Nx = 50,
Ny = 50,
GConv = 0)
#SMF28.Plot()
Sol = SuPySolver(Coupler = SMF28,
Tolerance = 1e-30,
MaxIter = 1000,
nMode = 10,
sMode = 8,
Error = 2)
SuperModes = Sol.GetModes(wavelength = 1.55,
Nstep = 300,
ITRi = 1,
ITRf = 0.05,
RightSymmetry = 0,
TopSymmetry = 0,
Sorting = 'Field')
SuperModes.Plot(Input=['Adiabatic'])
| 32.192982
| 96
| 0.523706
|
1c0e365db691146048b2879d7578c5a0337bb088
| 2,295
|
py
|
Python
|
app/main.py
|
adamsqi/dockerized-product-rest-api
|
0d0db91c4c25d5b82df1d6d006cc264ac8add177
|
[
"MIT"
] | null | null | null |
app/main.py
|
adamsqi/dockerized-product-rest-api
|
0d0db91c4c25d5b82df1d6d006cc264ac8add177
|
[
"MIT"
] | null | null | null |
app/main.py
|
adamsqi/dockerized-product-rest-api
|
0d0db91c4c25d5b82df1d6d006cc264ac8add177
|
[
"MIT"
] | null | null | null |
import os
from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
app = Flask(__name__)
basedir = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'db.sqlite')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # stop from complaining in the console
db = SQLAlchemy(app)
ma = Marshmallow(app)
class Product(db.Model):
sku = db.Column(db.String(20), primary_key=True)
name = db.Column(db.String(255))
qty = db.Column(db.Integer)
price = db.Column(db.Float)
def __init__(self, sku, name, qty, price):
self.sku = sku
self.name = name
self.qty = qty
self.price = price
db.drop_all()
db.create_all()
class ProductSchema(ma.Schema):
class Meta:
fields = ('sku', 'name', 'qty', 'price')
product_schema = ProductSchema()
products_schema = ProductSchema(many=True)
@app.route('/product/register', methods=['POST'])
def register_product():
sku = request.json['sku']
name = request.json['name']
qty = request.json['qty']
price = request.json['price']
new_product = Product(sku, name, qty, price)
db.session.add(new_product)
db.session.commit()
return product_schema.jsonify(new_product)
@app.route('/product/<sku>', methods=['GET'])
def get_product(sku):
product = Product.query.filter(Product.sku == sku)
result = products_schema.dump(product)
return jsonify(result)
@app.route('/products/available', methods=['GET'])
def get_all_available_products():
all_products = Product.query.filter(Product.qty > 0)
result = products_schema.dump(all_products)
return jsonify(result)
@app.route('/products/sold_out', methods=['GET'])
def get_all_sold_products():
all_products = Product.query.filter(Product.qty == 0)
result = products_schema.dump(all_products)
return jsonify(result)
@app.route('/product/<sku>/set_new_qty/<qty>', methods=['PUT'])
def register_quantity_change(sku, qty):
product = Product.query.filter(Product.sku == sku).first()
product.qty = qty
db.session.commit()
return product_schema.jsonify(product)
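# Illustrative sketch of the exposed endpoints (example values are hypothetical):
#   POST /product/register           body: {"sku": "A1", "name": "Mug", "qty": 3, "price": 9.5}
#   GET  /product/A1                 -> the product with that SKU
#   GET  /products/available         -> products with qty > 0
#   GET  /products/sold_out          -> products with qty == 0
#   PUT  /product/A1/set_new_qty/0   -> updates the stored quantity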
if __name__ == '__main__':
app.run(debug=True, port=5000, host='0.0.0.0')
| 26.686047
| 92
| 0.693682
|
a5ba7717cab67f9166d4722c456e2ffed14d82d6
| 264
|
py
|
Python
|
module_02_os/psl_02.01_sys_01.py
|
CodingGearsCourses/Python-3-Standard-Library-Essentials
|
8b80bc8b77fa477b6ccbe2886ed9239c2defdfda
|
[
"Apache-2.0"
] | null | null | null |
module_02_os/psl_02.01_sys_01.py
|
CodingGearsCourses/Python-3-Standard-Library-Essentials
|
8b80bc8b77fa477b6ccbe2886ed9239c2defdfda
|
[
"Apache-2.0"
] | null | null | null |
module_02_os/psl_02.01_sys_01.py
|
CodingGearsCourses/Python-3-Standard-Library-Essentials
|
8b80bc8b77fa477b6ccbe2886ed9239c2defdfda
|
[
"Apache-2.0"
] | null | null | null |
# CodingGears.io
# sys module
import sys
# TODO: version_info
print(sys.version_info)
# TODO: version
print(sys.version)
# TODO: path
print(sys.path)
for p in sys.path:
print(p)
# TODO: platform
print(sys.platform)
# TODO: copyright
print(sys.copyright)
| 12
| 23
| 0.719697
|
d67d8994ad03f0147e6cb1870817a216a6344801
| 11,709
|
py
|
Python
|
modules/pymol/computing.py
|
markdoerr/pymol-open-source
|
b891b59ffaea812600648aa131ea2dbecd59a199
|
[
"CNRI-Python"
] | null | null | null |
modules/pymol/computing.py
|
markdoerr/pymol-open-source
|
b891b59ffaea812600648aa131ea2dbecd59a199
|
[
"CNRI-Python"
] | null | null | null |
modules/pymol/computing.py
|
markdoerr/pymol-open-source
|
b891b59ffaea812600648aa131ea2dbecd59a199
|
[
"CNRI-Python"
] | null | null | null |
#A* -------------------------------------------------------------------
#B* This file contains source code for the PyMOL computer program
#C* Copyright (c) Schrodinger, LLC
#D* -------------------------------------------------------------------
#E* It is unlawful to modify or remove this copyright notice.
#F* -------------------------------------------------------------------
#G* Please see the accompanying LICENSE file for further information.
#H* -------------------------------------------------------------------
#I* Additional authors of this source file include:
#-*
#-*
#-*
#Z* -------------------------------------------------------------------
from __future__ import print_function, absolute_import
import sys
cmd_module = __import__("sys").modules["pymol.cmd"]
from .cmd import _cmd, lock, unlock, Shortcut, \
_feedback, fb_module, fb_mask, \
DEFAULT_ERROR, DEFAULT_SUCCESS, _raising, is_ok, is_error, \
is_list, safe_list_eval, is_string
import traceback
import threading
import os
import pymol
def model_to_sdf_list(self_cmd,model):
from chempy import io
sdf_list = io.mol.toList(model)
fixed = []
restrained = []
at_id = 1
for atom in model.atom:
if atom.flags & 4:
if hasattr(atom,'ref_coord'):
restrained.append( [at_id,atom.ref_coord])
if atom.flags & 8:
fixed.append(at_id)
at_id = at_id + 1
fit_flag = 1
if len(fixed):
fit_flag = 0
sdf_list.append("> <FIXED_ATOMS>\n")
sdf_list.append("+ ATOM\n");
for ID in fixed:
sdf_list.append("| %4d\n"%ID)
sdf_list.append("\n")
if len(restrained):
fit_flag = 0
sdf_list.append("> <RESTRAINED_ATOMS>\n")
sdf_list.append("+ ATOM MIN MAX F_CONST X Y Z\n")
for entry in restrained:
xrd = entry[1]
sdf_list.append("| %4d %6.3f %6.3f %6.3f %10.4f %10.4f %10.4f\n"%
(entry[0],0,0,3,xrd[0],xrd[1],xrd[2]))
sdf_list.append("\n")
electro_mode = int(self_cmd.get('clean_electro_mode'))
if electro_mode == 0:
fit_flag = 0
sdf_list.append("> <ELECTROSTATICS>\n")
sdf_list.append("+ TREATMENT\n")
sdf_list.append("| NONE\n")
sdf_list.append("\n")
sdf_list.append("$$$$\n")
return (fit_flag, sdf_list)
def get_energy_from_rec(rec):
# we really need to replace this with a proper SD parser...
result = 9999.00
try:
rec_list = rec.splitlines()
read_energy = 0
for line in rec_list:
if read_energy == 1:
result = float(line.strip())
break
if line.strip() == '> <MMFF94 energy>':
read_energy = 1
except:
traceback.print_exc()
return result
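# Illustrative sketch: the energy is taken from the line that follows the
# '> <MMFF94 energy>' tag of an SD record; anything else yields 9999.00.
#assert get_energy_from_rec('> <MMFF94 energy>\n-12.3456\n') == -12.3456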
class CleanJob:
def __init__(self,self_cmd,sele,state=-1,message=None):
self.cmd = self_cmd
if message == '':
message = None
if state<1:
state = self_cmd.get_state()
        # this code will be moved elsewhere
self.ok = 1
try:
from freemol import mengine
except:
self.ok = 0
print("Error: unable to import freemol.mengine module.")
print("This PyMOL build appears not to include full modeling capabilities.")
return
if self.ok:
if not mengine.validate():
self.ok = 0
print("Error: Unable to validate freemol.mengine")
return
if self.ok:
if self_cmd.count_atoms(sele) > 999:
self.ok = 0
print("Error: Sorry, clean is currently limited to 999 atoms")
return
if not self.ok:
pass
            # we can't call warn because this is not the tcl-tk gui thread
# warn("Please be sure that FreeMOL is correctly installed.")
else:
if message is not None:
self.cmd.do("_ cmd.wizard('message','''%s''')"%message)
obj_list = self_cmd.get_object_list("bymol ("+sele+")")
self.ok = 0
result = None
if is_list(obj_list) and (len(obj_list)==1):
obj_name = obj_list[0]
self_cmd.sculpt_deactivate(obj_name)
# eliminate all sculpting information for object
self.cmd.sculpt_purge()
self.cmd.set("sculpting",0)
state = self_cmd.get_state()
if self_cmd.count_atoms(obj_name+" and flag 2"): # any atoms restrained?
self_cmd.reference("validate",obj_name,state) # then we have reference coordinates
input_model = self_cmd.get_model(obj_name,state=state)
(fit_flag, sdf_list) = model_to_sdf_list(self_cmd,input_model)
input_sdf = ''.join(sdf_list)
if sys.version_info[0] > 2:
input_sdf = input_sdf.encode()
result = mengine.run(input_sdf)
if result is not None:
if len(result):
clean_sdf = result[0]
if sys.version_info[0] > 2:
clean_sdf = clean_sdf.decode()
clean_rec = clean_sdf.split("$$$$")[0]
clean_name = ""
self.energy = get_energy_from_rec(clean_rec)
try:
if len(clean_rec) and int(self.energy) != 9999:
clean_name = "builder_clean_tmp"
self_cmd.set("suspend_updates")
self.ok = 1
else:
self.ok = 0
if self.ok:
self_cmd.read_molstr(clean_rec, clean_name, zoom=0)
# need to insert some error checking here
if clean_name in self_cmd.get_names("objects"):
self_cmd.set("retain_order","1",clean_name)
if fit_flag:
self_cmd.fit(clean_name, obj_name, matchmaker=4,
mobile_state=1, target_state=state)
self_cmd.push_undo(obj_name)
self_cmd.update(obj_name, clean_name, matchmaker=0,
source_state=1, target_state=state)
self_cmd.sculpt_activate(obj_name)
self_cmd.sculpt_deactivate(obj_name)
self.ok = 1
message = "Clean: Finished. Energy = %3.2f" % self.energy
if message is not None:
self.cmd.do("_ cmd.wizard('message','''%s''')"%message)
self.cmd.do("_ wizard")
except ValueError:
self.ok = 0
finally:
self_cmd.delete(clean_name)
self_cmd.unset("suspend_updates")
if not self.ok:
                # we can't call warn because this is not the tcl-tk gui thread
if result is not None:
if len(result)>1:
print("\n=== mengine errors below === ")
print(result[1].replace("\n\n","\n"), end=' ')
print("=== mengine errors above ===\n")
failed_file = "cleanup_failed.sdf"
                print("Clean-Error: Structure cleanup failed. Invalid input or software malfunction?")
aromatic = 0
for bond in input_model.bond:
if bond.order == 4:
aromatic = 1
try:
open(failed_file,'wb').write(input_sdf)
print("Clean-Error: Wrote SD file '%s' into the directory:"%failed_file)
print("Clean-Error: '%s'."%os.getcwd())
print("Clean-Error: If you believe PyMOL should be able to handle this structure")
print("Clean-Error: then please email that SD file to help@schrodinger.com. Thank you!")
except IOError:
                    print("Unable to write '%s'" % failed_file)
if aromatic:
print("Clean-Warning: Please eliminate aromatic bonds and then try again.")
if message is not None:
self_cmd.do("_ wizard")
def _clean(selection, present='', state=-1, fix='', restrain='',
method='mmff', save_undo=1, message=None,
_self=cmd_module):
self_cmd = _self
clean1_sele = "_clean1_tmp"
clean2_sele = "_clean2_tmp"
clean_obj = "_clean_obj"
r = DEFAULT_SUCCESS
c = None
if self_cmd.select(clean1_sele,selection,enable=0)>0:
try:
if present=='':
self_cmd.select(clean2_sele," byres (byres ("+selection+") extend 1)",enable=0) # go out 2 residues
else:
self_cmd.select(clean2_sele, clean1_sele+" or ("+present+")",enable=0)
suspend_undo = self_cmd.get("suspend_undo")
self_cmd.set("suspend_undo", updates=0)
self_cmd.set("suspend_updates")
self_cmd.rename(clean2_sele) # ensure identifiers are unique
self_cmd.create(clean_obj, clean2_sele, zoom=0, source_state=state,target_state=1)
self_cmd.disable(clean_obj)
self_cmd.unset("suspend_updates")
self_cmd.flag(3,clean_obj+" in ("+clean2_sele+" and not "+clean1_sele+")","set")
# fix nearby atoms
self_cmd.h_add(clean_obj) # fill any open valences
if message is None:
at_cnt = self_cmd.count_atoms(clean_obj)
message = 'Clean: Cleaning %d atoms. Please wait...'%at_cnt
c = CleanJob(self_cmd, clean_obj, state, message=message)
if c.ok:
self_cmd.set("suspend_undo", suspend_undo, updates=0)
self_cmd.push_undo(selection)
self_cmd.update(clean1_sele, clean_obj,
source_state=1, target_state=state)
self_cmd.set("suspend_undo", True, updates=0)
self_cmd.delete(clean_obj)
self_cmd.delete(clean1_sele)
self_cmd.delete(clean2_sele)
self_cmd.set("suspend_undo", suspend_undo, updates=0)
except:
traceback.print_exc()
if hasattr(c,"energy"):
return c.energy
else:
return None
def clean(selection, present='', state=-1, fix='', restrain='',
method='mmff', async_=0, save_undo=1, message=None,
_self=cmd_module, **kwargs):
if int(state) == 0:
raise pymol.CmdException('cleaning all states not supported')
async_ = int(kwargs.pop('async', async_))
if kwargs:
raise pymol.CmdException('unknown argument: ' + ', '.join(kwargs))
args = (selection, present, state, fix, restrain, method, save_undo, message, _self)
if not async_:
return _clean(*args)
else:
try:
t = threading.Thread(target=_clean,
args=args)
t.setDaemon(1)
t.start()
except:
traceback.print_exc()
return 0
| 41.228873
| 115
| 0.505338
|
555e9cf1e1431075d74c27fda1fe078448def467
| 15,634
|
py
|
Python
|
examples/pgu/gui/area.py
|
h-vetinari/pybox2d
|
b273568a0e57d51ce41d20d012571186a1839620
|
[
"Zlib"
] | 421
|
2015-01-28T19:53:12.000Z
|
2022-03-29T19:35:00.000Z
|
examples/pgu/gui/area.py
|
h-vetinari/pybox2d
|
b273568a0e57d51ce41d20d012571186a1839620
|
[
"Zlib"
] | 120
|
2015-01-24T00:38:48.000Z
|
2022-03-16T18:47:41.000Z
|
examples/pgu/gui/area.py
|
h-vetinari/pybox2d
|
b273568a0e57d51ce41d20d012571186a1839620
|
[
"Zlib"
] | 124
|
2015-03-11T02:11:25.000Z
|
2022-03-25T10:44:06.000Z
|
"""
"""
import os
from . import pguglobals
from .const import *
from . import surface
from . import container, table
from . import group
from . import basic, button, slider
class SlideBox(container.Container):
"""A scrollable area with no scrollbars.
Example:
c = SlideBox(w,100,100)
c.offset = (10,10)
c.repaint()
"""
_widget = None
def __init__(self, widget, width, height, **params):
"""SlideBox constructor.
Arguments:
widget -- widget to be able to scroll around
width, height -- size of scrollable area
"""
params.setdefault('width', width)
params.setdefault('height', height)
container.Container.__init__(self, **params)
self.offset = [0, 0]
self.widget = widget
@property
def widget(self):
return self._widget
@widget.setter
def widget(self, val):
# Remove the old widget first
if self._widget:
self.remove(self._widget)
# Now add in the new widget
self._widget = val
self.add(val, 0, 0)
def paint(self, s):
#if not hasattr(self,'surface'):
self.surface = pygame.Surface((self.max_rect.w,self.max_rect.h),0,s)
#self.surface.fill((0,0,0,0))
pguglobals.app.theme.render(self.surface,self.style.background,pygame.Rect(0,0,self.max_rect.w,self.max_rect.h))
self.bkgr = pygame.Surface((s.get_width(),s.get_height()),0,s)
self.bkgr.blit(s,(0,0))
container.Container.paint(self,self.surface)
s.blit(self.surface,(-self.offset[0],-self.offset[1]))
self._offset = self.offset[:]
return
def paint_for_when_pygame_supports_other_tricks(self,s):
#this would be ideal if pygame had support for it!
#and if pgu also had a paint(self,s,rect) method to paint small parts
sr = (self.offset[0],self.offset[1],self.max_rect.w,self.max_rect.h)
cr = (-self.offset[0],-self.offset[1],s.get_width(),s.get_height())
s2 = s.subsurface(sr)
s2.set_clip(cr)
container.Container.paint(self,s2)
def proxy_paint(self, s):
container.Container.paint(self, surface.ProxySurface(parent=None,
rect=self.max_rect,
real_surface=s,
offset=self.offset))
def update(self, s):
rects = container.Container.update(self,self.surface)
rets = []
s_rect = pygame.Rect(0,0,s.get_width(),s.get_height())
if self.offset == self._offset:
for r in rects:
r2 = r.move((-self.offset[0],-self.offset[1]))
if r2.colliderect(s_rect):
s.blit(self.surface.subsurface(r),r2)
rets.append(r2)
else:
s.blit(self.bkgr,(0,0))
sub = pygame.Rect(self.offset[0],self.offset[1],min(s.get_width(),self.max_rect.w-self.offset[0]),min(s.get_height(),self.max_rect.h-self.offset[1]))
# print sub
# print self.surface.get_width(),self.surface.get_height()
# print s.get_width(),s.get_height()
# print self.offset
# print self.style.width,self.style.height
s.blit(self.surface.subsurface(sub),(0,0))
rets.append(s_rect)
self._offset = self.offset[:]
return rets
def proxy_update(self, s):
rects = container.Container.update(self, surface.ProxySurface(parent=None,
rect=self.max_rect,
real_surface=s,
offset=self.offset))
result = []
for r in rects: result.append(pygame.Rect(r).move(self.offset))
return result
def resize(self, width=None, height=None):
container.Container.resize(self)
self.max_rect = pygame.Rect(self.widget.rect)
#self.max_rect.w = max(self.max_rect.w,self.style.width)
#self.max_rect.h = max(self.max_rect.h,self.style.height)
return self.style.width,self.style.height
#self.rect = pygame.Rect(self.rect[0], self.rect[1], self.style.width, self.style.height)
def event(self, e):
if e.type in [MOUSEBUTTONDOWN, MOUSEBUTTONUP, MOUSEMOTION]:
pos = (e.pos[0] + self.offset[0], e.pos[1] + self.offset[1])
if self.max_rect.collidepoint(pos):
e_params = {'pos': pos }
if e.type == MOUSEMOTION:
e_params['buttons'] = e.buttons
e_params['rel'] = e.rel
else:
e_params['button'] = e.button
e = pygame.event.Event(e.type, e_params)
container.Container.event(self, e)
#class SlideBox(Area):
# def __init__(self,*args,**params):
# print 'gui.SlideBox','Scheduled to be renamed to Area.'
# Area.__init__(self,*args,**params)
class ScrollArea(table.Table):
"""A scrollable area with scrollbars."""
_widget = None
def __init__(self, widget, width=0, height=0, hscrollbar=True, vscrollbar=True,step=24, **params):
"""ScrollArea constructor.
Arguments:
widget -- widget to be able to scroll around
width, height -- size of scrollable area. Set either to 0 to default to size of widget.
hscrollbar -- set to False if you do not wish to have a horizontal scrollbar
vscrollbar -- set to False if you do not wish to have a vertical scrollbar
step -- set to how far clicks on the icons will step
"""
w= widget
params.setdefault('cls', 'scrollarea')
table.Table.__init__(self, width=width,height=height,**params)
self.sbox = SlideBox(w, width=width, height=height, cls=self.cls+".content")
self.widget = w
self.vscrollbar = vscrollbar
self.hscrollbar = hscrollbar
self.step = step
@property
def widget(self):
return self._widget
@widget.setter
def widget(self, val):
self._widget = val
self.sbox.widget = val
def resize(self,width=None,height=None):
widget = self.widget
box = self.sbox
#self.clear()
table.Table.clear(self)
#print 'resize',self,self._rows
self.tr()
self.td(box)
widget.rect.w, widget.rect.h = widget.resize()
my_width,my_height = self.style.width,self.style.height
if not my_width:
my_width = widget.rect.w
self.hscrollbar = False
if not my_height:
my_height = widget.rect.h
self.vscrollbar = False
box.style.width,box.style.height = my_width,my_height #self.style.width,self.style.height
box.rect.w,box.rect.h = box.resize()
#print widget.rect
#print box.rect
#r = table.Table.resize(self,width,height)
#print r
#return r
#print box.offset
# #this old code automatically adds in a scrollbar if needed
# #but it doesn't always work
# self.vscrollbar = None
# if widget.rect.h > box.rect.h:
# self.vscrollbar = slider.VScrollBar(box.offset[1],0, 65535, 0,step=self.step)
# self.td(self.vscrollbar)
# self.vscrollbar.connect(CHANGE, self._vscrollbar_changed, None)
#
# vs = self.vscrollbar
# vs.rect.w,vs.rect.h = vs.resize()
# box.style.width = self.style.width - vs.rect.w
#
#
# self.hscrollbar = None
# if widget.rect.w > box.rect.w:
# self.hscrollbar = slider.HScrollBar(box.offset[0], 0,65535, 0,step=self.step)
# self.hscrollbar.connect(CHANGE, self._hscrollbar_changed, None)
# self.tr()
# self.td(self.hscrollbar)
#
# hs = self.hscrollbar
# hs.rect.w,hs.rect.h = hs.resize()
# box.style.height = self.style.height - hs.rect.h
xt,xr,xb,xl = pguglobals.app.theme.getspacing(box)
if self.vscrollbar:
self.vscrollbar = slider.VScrollBar(box.offset[1],0, 65535, 0,step=self.step)
self.td(self.vscrollbar)
self.vscrollbar.connect(CHANGE, self._vscrollbar_changed, None)
vs = self.vscrollbar
vs.rect.w,vs.rect.h = vs.resize()
if self.style.width:
box.style.width = self.style.width - (vs.rect.w + xl+xr)
if self.hscrollbar:
self.hscrollbar = slider.HScrollBar(box.offset[0], 0,65535, 0,step=self.step)
self.hscrollbar.connect(CHANGE, self._hscrollbar_changed, None)
self.tr()
self.td(self.hscrollbar)
hs = self.hscrollbar
hs.rect.w,hs.rect.h = hs.resize()
if self.style.height:
box.style.height = self.style.height - (hs.rect.h + xt + xb)
if self.hscrollbar:
hs = self.hscrollbar
hs.min = 0
hs.max = widget.rect.w - box.style.width
hs.style.width = box.style.width
hs.size = hs.style.width * box.style.width / max(1,widget.rect.w)
else:
box.offset[0] = 0
if self.vscrollbar:
vs = self.vscrollbar
vs.min = 0
vs.max = widget.rect.h - box.style.height
vs.style.height = box.style.height
vs.size = vs.style.height * box.style.height / max(1,widget.rect.h)
else:
box.offset[1] = 0
#print self.style.width,box.style.width, hs.style.width
r = table.Table.resize(self,width,height)
return r
def x_resize(self, width=None, height=None):
w,h = table.Table.resize(self, width, height)
if self.hscrollbar:
if self.widget.rect.w <= self.sbox.rect.w:
self.hscrollbar.size = self.hscrollbar.style.width
else:
self.hscrollbar.size = max(20,self.hscrollbar.style.width * self.sbox.rect.w / self.widget.rect.w)
self._hscrollbar_changed(None)
if self.widget.rect.h <= self.sbox.rect.h:
self.vscrollbar.size = self.vscrollbar.style.height
else:
self.vscrollbar.size = max(20,self.vscrollbar.style.height * self.sbox.rect.h / self.widget.rect.h)
self._vscrollbar_changed(None)
return w,h
def _vscrollbar_changed(self, xxx):
#y = (self.widget.rect.h - self.sbox.rect.h) * self.vscrollbar.value / 1000
#if y >= 0: self.sbox.offset[1] = -y
self.sbox.offset[1] = self.vscrollbar.value
self.sbox.reupdate()
def _hscrollbar_changed(self, xxx):
#x = (self.widget.rect.w - self.sbox.rect.w) * self.hscrollbar.value / 1000
#if x >= 0: self.sbox.offset[0] = -x
self.sbox.offset[0] = self.hscrollbar.value
self.sbox.reupdate()
def set_vertical_scroll(self, percents):
#if not self.vscrollbar: return
if not hasattr(self.vscrollbar,'value'): return
self.vscrollbar.value = percents #min(max(percents*10, 0), 1000)
self._vscrollbar_changed(None)
def set_horizontal_scroll(self, percents):
#if not self.hscrollbar: return
if not hasattr(self.hscrollbar,'value'): return
self.hscrollbar.value = percents #min(max(percents*10, 0), 1000)
self._hscrollbar_changed(None)
def event(self, e):
#checking for event recipient
if (table.Table.event(self, e)):
return True
#mouse wheel scrolling
if self.vscrollbar:
if not hasattr(self.vscrollbar,'value'):
return False
if e.type == pygame.locals.MOUSEBUTTONDOWN:
if e.button == 4: #wheel up
self.vscrollbar._click(-1)
return True
elif e.button == 5: #wheel down
self.vscrollbar._click(1)
return True
return False
class _List_Item(button._button):
def __init__(self,label=None,image=None,value=None,**params): #TODO label= could conflict with the module label
#param image: an imagez.Image object (optional)
#param text: a string object
params.setdefault('cls','list.item')
button._button.__init__(self,**params)
self.group = None
self.value = value #(self, value)
self.widget = None
if type(label) == str:
label = basic.Label(label, cls=self.cls+".label")
if image and label:
self.widget = container.Container()
self.widget.add(image, 0, 0)
#HACK: improper use of .resize()
image.rect.w,image.rect.h = image.resize()
self.widget.add(label, image.rect.w, 0)
elif image: self.widget = image
elif label: self.widget = label
self.pcls = ""
def resize(self,width=None,height=None):
self.widget.rect.w,self.widget.rect.h = self.widget.resize()
return self.widget.rect.w,self.widget.rect.h
# self.widget._resize()
# self.rect.w,self.rect.h = self.widget.rect_margin.w,self.widget.rect_margin.h
def event(self,e):
button._button.event(self,e)
if self.group.value == self.value: self.pcls = "down"
def paint(self,s):
if self.group.value == self.value: self.pcls = "down"
self.widget.paint(surface.subsurface(s,self.widget.rect))
def click(self):
self.group.value = self.value
for w in self.group.widgets:
if w != self: w.pcls = ""
class List(ScrollArea):
"""A list of items in an area.
This widget can be a form element, it has a value set to whatever item is selected.
"""
def _change(self, value):
self.value = self.group.value
self.send(CHANGE)
def __init__(self, width, height, **params):
params.setdefault('cls', 'list')
self.table = table.Table(width=width)
ScrollArea.__init__(self, self.table, width, height,hscrollbar=False ,**params)
self.items = []
g = group.Group()
self.group = g
g.connect(CHANGE,self._change,None)
self.value = self.group.value = None
self.add = self._add
self.remove = self._remove
def clear(self):
"""Clear the list."""
self.items = []
self.group = group.Group()
self.group.connect(CHANGE,self._change,None)
self.table.clear()
self.set_vertical_scroll(0)
self.blur(self.myfocus)
def _add(self, label, image = None, value=None):
item = _List_Item(label,image=image,value=value)
self.table.tr()
self.table.add(item)
self.items.append(item)
item.group = self.group
item.group.add(item)
def _remove(self, item):
for i in self.items:
if i.value == item: item = i
if item not in self.items:
return
item.blur()
self.items.remove(item)
self.group.widgets.remove(item)
self.table.remove_row(item.style.row)
| 35.94023
| 161
| 0.564155
|
ad951d1dff16a339b431e0c043aee09ac66480b9
| 1,165
|
py
|
Python
|
renku/service/serializers/version.py
|
lokijuhy/renku-python
|
0bfceafa4e6b4750439ab0ed20c61b0a6ba03a1f
|
[
"Apache-2.0"
] | null | null | null |
renku/service/serializers/version.py
|
lokijuhy/renku-python
|
0bfceafa4e6b4750439ab0ed20c61b0a6ba03a1f
|
[
"Apache-2.0"
] | null | null | null |
renku/service/serializers/version.py
|
lokijuhy/renku-python
|
0bfceafa4e6b4750439ab0ed20c61b0a6ba03a1f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Renku service version controller."""
from marshmallow import Schema, fields
class VersionResponse(Schema):
"""Version response schema."""
latest_version = fields.String()
supported_project_version = fields.Number()
minimum_api_version = fields.String()
maximum_api_version = fields.String()
class VersionResponseRPC(Schema):
"""Version response RPC schema."""
result = fields.Nested(VersionResponse)
| 35.30303
| 75
| 0.756223
|
69ebecfe11b68937423829cfb04a20562fd9878a
| 1,945
|
py
|
Python
|
attic/test/hashdeep.py
|
jdehaan/Funani
|
a70b5743c383c0f071cd93585bb41442052d9b38
|
[
"BSD-2-Clause"
] | null | null | null |
attic/test/hashdeep.py
|
jdehaan/Funani
|
a70b5743c383c0f071cd93585bb41442052d9b38
|
[
"BSD-2-Clause"
] | 3
|
2019-01-09T23:18:11.000Z
|
2020-07-08T10:36:35.000Z
|
attic/test/hashdeep.py
|
jdehaan/Funani
|
a70b5743c383c0f071cd93585bb41442052d9b38
|
[
"BSD-2-Clause"
] | 1
|
2019-04-27T05:27:16.000Z
|
2019-04-27T05:27:16.000Z
|
"""
Build recursive hash of files in directory tree in hashdeep format.
Hashdeep format description:
http://md5deep.sourceforge.net/start-hashdeep.html
hashdeep.py differences from original hashdeep:
- if called without arguments, automatically starts to build
recursive hash starting from the current directory
(original hashdeep waits for the output from stdin)
- uses only sha256 (original uses md5 and sha256)
- uses relative paths only (original works with absolute)
hashdeep.py output example:
$ hashdeep.py
%%%% HASHDEEP-1.0
%%%% size,sha256,filename
##
## $ hashdeep.py
##
5584,28a9b958c3be22ef6bd569bb2f4ea451e6bdcd3b0565c676fbd3645850b4e670,dir/config.h
9236,e77137d635c4e9598d64bc2f3f564f36d895d9cfc5050ea6ca75beafb6e31ec2,dir/INSTALL
1609,343f3e1466662a92fa1804e2fc787e89474295f0ab086059e27ff86535dd1065,dir/README
"""
__author__ = 'anatoly techtonik <techtonik@gmail.com>'
__license__ = 'Public Domain'
__version__ = '1.0'
import os
import os.path as osp
import hashlib
# --- helpers ---
def write(text):
""" helper for writing output, as a single point for replacement """
print(text)
def filehash(filepath):
blocksize = 64*1024
    sha = hashlib.sha256()  # the docstring and output header advertise sha256, not sha1
with open(filepath, 'rb') as fp:
while True:
data = fp.read(blocksize)
if not data:
break
sha.update(data)
return sha.hexdigest()
# --- /helpers ---
write("""\
%%%% HASHDEEP-1.0
%%%% size,sha256,filename
##
## $ hashdeep.py
##""")
ROOT = '.'
for root, dirs, files in os.walk(ROOT):
for fpath in [osp.join(root, f) for f in files]:
size = osp.getsize(fpath)
sha = filehash(fpath)
name = osp.relpath(fpath, ROOT)
write('%s,%s,%s' % (size, sha, name))
#for ignored in ['.hg', '.svn', 'git']:
# if ignored in dirs:
# dirs.remove(ignored)
| 27.785714
| 83
| 0.652956
|
55aa14e299d3dfa2ac28c1599fe4960feaab860c
| 3,012
|
py
|
Python
|
exifread/tags/makernote/fujifilm.py
|
spectralskylight/skydataviewer
|
ac45fde11fb2cd1daa3f09bc30c2fad9391438df
|
[
"BSD-3-Clause"
] | 22
|
2020-03-04T07:01:35.000Z
|
2022-01-02T21:23:48.000Z
|
exifread/tags/makernote/fujifilm.py
|
spectralskylight/skydataviewer
|
ac45fde11fb2cd1daa3f09bc30c2fad9391438df
|
[
"BSD-3-Clause"
] | 5
|
2020-03-04T07:06:02.000Z
|
2021-03-09T22:37:28.000Z
|
exifread/tags/makernote/fujifilm.py
|
spectralskylight/skydataviewer
|
ac45fde11fb2cd1daa3f09bc30c2fad9391438df
|
[
"BSD-3-Clause"
] | 8
|
2020-07-01T06:17:16.000Z
|
2021-12-12T20:51:09.000Z
|
"""
Makernote (proprietary) tag definitions for FujiFilm.
http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/FujiFilm.html
"""
from ...utils import make_string
TAGS = {
0x0000: ('NoteVersion', make_string),
0x0010: ('InternalSerialNumber', ),
0x1000: ('Quality', ),
0x1001: ('Sharpness', {
0x1: 'Soft',
0x2: 'Soft',
0x3: 'Normal',
0x4: 'Hard',
0x5: 'Hard2',
0x82: 'Medium Soft',
0x84: 'Medium Hard',
0x8000: 'Film Simulation'
}),
0x1002: ('WhiteBalance', {
0x0: 'Auto',
0x100: 'Daylight',
0x200: 'Cloudy',
0x300: 'Daylight Fluorescent',
0x301: 'Day White Fluorescent',
0x302: 'White Fluorescent',
0x303: 'Warm White Fluorescent',
0x304: 'Living Room Warm White Fluorescent',
0x400: 'Incandescent',
0x500: 'Flash',
0x600: 'Underwater',
0xf00: 'Custom',
0xf01: 'Custom2',
0xf02: 'Custom3',
0xf03: 'Custom4',
0xf04: 'Custom5',
0xff0: 'Kelvin'
}),
0x1003: ('Saturation', {
0x0: 'Normal',
0x80: 'Medium High',
0x100: 'High',
0x180: 'Medium Low',
0x200: 'Low',
0x300: 'None (B&W)',
0x301: 'B&W Red Filter',
0x302: 'B&W Yellow Filter',
0x303: 'B&W Green Filter',
0x310: 'B&W Sepia',
0x400: 'Low 2',
0x8000: 'Film Simulation'
}),
0x1004: ('Contrast', {
0x0: 'Normal',
0x80: 'Medium High',
0x100: 'High',
0x180: 'Medium Low',
0x200: 'Low',
0x8000: 'Film Simulation'
}),
0x1005: ('ColorTemperature', ),
0x1006: ('Contrast', {
0x0: 'Normal',
0x100: 'High',
0x300: 'Low'
}),
0x100a: ('WhiteBalanceFineTune', ),
0x1010: ('FlashMode', {
0: 'Auto',
1: 'On',
2: 'Off',
3: 'Red Eye Reduction'
}),
0x1011: ('FlashStrength', ),
0x1020: ('Macro', {
0: 'Off',
1: 'On'
}),
0x1021: ('FocusMode', {
0: 'Auto',
1: 'Manual'
}),
0x1022: ('AFPointSet', {
0: 'Yes',
1: 'No'
}),
0x1023: ('FocusPixel', ),
0x1030: ('SlowSync', {
0: 'Off',
1: 'On'
}),
0x1031: ('PictureMode', {
0: 'Auto',
1: 'Portrait',
2: 'Landscape',
4: 'Sports',
5: 'Night',
6: 'Program AE',
256: 'Aperture Priority AE',
512: 'Shutter Priority AE',
768: 'Manual Exposure'
}),
0x1032: ('ExposureCount', ),
0x1100: ('MotorOrBracket', {
0: 'Off',
1: 'On'
}),
0x1210: ('ColorMode', {
0x0: 'Standard',
0x10: 'Chrome',
0x30: 'B & W'
}),
0x1300: ('BlurWarning', {
0: 'Off',
1: 'On'
}),
0x1301: ('FocusWarning', {
0: 'Off',
1: 'On'
}),
0x1302: ('ExposureWarning', {
0: 'Off',
1: 'On'
}),
}
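# Illustrative lookup (an assumption about how this map is consumed, not code
# taken from exifread itself): a raw Sharpness value of 0x82 resolves via
#   name, values = TAGS[0x1001]
#   values[0x82]   # -> 'Medium Soft'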
| 23.169231
| 67
| 0.462151
|
6df4257734cb62e7be2f15f62577a46bf846df6d
| 7,183
|
py
|
Python
|
Synthesis/model/reflection_synthesis.py
|
XUHUAKing/Single-Image-Reflection-Removal-Beyond-Linearity
|
bc3a2ffc56e91f30a594c95c138d074943eac0af
|
[
"MIT"
] | 50
|
2019-07-31T13:59:46.000Z
|
2022-03-27T05:23:39.000Z
|
Synthesis/model/reflection_synthesis.py
|
XUHUAKing/Single-Image-Reflection-Removal-Beyond-Linearity
|
bc3a2ffc56e91f30a594c95c138d074943eac0af
|
[
"MIT"
] | 5
|
2019-10-30T12:57:05.000Z
|
2021-04-22T22:48:42.000Z
|
Synthesis/model/reflection_synthesis.py
|
XUHUAKing/Single-Image-Reflection-Removal-Beyond-Linearity
|
bc3a2ffc56e91f30a594c95c138d074943eac0af
|
[
"MIT"
] | 19
|
2019-08-01T00:45:33.000Z
|
2022-02-05T15:39:39.000Z
|
import torch
from collections import OrderedDict
from torch.autograd import Variable
import util.util as util
from model.image_pool import ImagePool
from .base_model import BaseModel
from . import networks
import numpy as np
import torch.nn as nn
class ReflectionSynthesisModel(BaseModel):
def name(self):
return 'ReflectionSynthesisModel'
def initialize(self, opt):
BaseModel.initialize(self, opt)
# load/define networks
self.netG = networks.define_G(opt.input_nc, opt.output_nc,
opt.ngf, opt.which_model_netG, opt.norm, not opt.no_dropout, opt.init_type, self.gpu_ids)
if self.isTrain:
use_sigmoid = opt.no_lsgan
self.netD = networks.define_D(opt.output_nc, opt.ndf,
opt.which_model_netD,
opt.n_layers_D, opt.norm, use_sigmoid, opt.init_type, self.gpu_ids)
if not self.isTrain or opt.continue_train:
which_epoch = opt.which_epoch
self.load_network(self.netG, 'G', which_epoch)
if self.isTrain:
self.load_network(self.netD, 'D', which_epoch)
if self.isTrain:
self.mix_AB_pool = ImagePool(opt.pool_size)
# define loss functions
self.criterionGAN = networks.GANLoss(use_lsgan=not opt.no_lsgan, tensor=self.Tensor)
self.criterionL1 = torch.nn.L1Loss()
self.criterionL2 = torch.nn.MSELoss()
# initialize optimizers
self.optimizer_G = torch.optim.Adam(self.netG.parameters(),
lr=opt.lr, betas=(opt.beta1, 0.999))
self.optimizer_D = torch.optim.Adam(self.netD.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
self.optimizers = []
self.schedulers = []
self.optimizers.append(self.optimizer_G)
self.optimizers.append(self.optimizer_D)
for optimizer in self.optimizers:
self.schedulers.append(networks.get_scheduler(optimizer, opt))
print('---------- Networks initialized -------------')
networks.print_network(self.netG)
if self.isTrain:
networks.print_network(self.netD)
print('-----------------------------------------------')
def set_input(self, input):
input_A = input['A']
input_A_origin = input['A_origin']
input_B = input['B']
One = torch.ones(input_A.shape)
if len(self.gpu_ids) > 0:
            # 'async' became a reserved keyword in Python 3.7; PyTorch uses non_blocking.
            input_A = input_A.cuda(self.gpu_ids[0], non_blocking=True)
            input_B = input_B.cuda(self.gpu_ids[0], non_blocking=True)
            One = One.cuda(self.gpu_ids[0], non_blocking=True)
self.input_A = input_A
self.input_A_origin = input_A_origin
self.input_B = input_B
self.One = One
if self.opt.phase == 'train':
input_C = input['C']
            input_C = input_C.cuda(self.gpu_ids[0], non_blocking=True)
self.input_C = input_C
self.image_paths = input['A_paths']
def forward(self):
self.real_A = self.input_A
self.real_A_origin = self.input_A_origin
self.real_B = self.input_B
if self.opt.phase == 'train':
self.real_C = self.input_C
def test(self):
real_A = self.input_A
real_A_origin = self.input_A_origin
real_B = self.input_B
concat_AB = torch.cat((real_B, real_A), dim=1)
W_A_reflection = self.netG(concat_AB)
W_A_reflection_revise = self.One - W_A_reflection
mix_AB = W_A_reflection * real_A + W_A_reflection_revise * real_B
self.real_A = real_A.data
self.real_A_origin = real_A_origin.data
self.real_B = real_B.data
self.W_A_reflection = W_A_reflection.data
self.mix_AB = mix_AB.data
# get image paths
def get_image_paths(self):
return self.image_paths
def backward_D_basic(self, netD, real, fake):
# Real
pred_real = netD(real)
loss_D_real = self.criterionGAN(pred_real, True)
# Fake
pred_fake = netD(fake.detach())
loss_D_fake = self.criterionGAN(pred_fake, False)
# Combined loss
loss_D = (loss_D_real + loss_D_fake) * 0.5
# backward
loss_D.backward()
return loss_D
def backward_D(self):
mix_AB = self.mix_AB_pool.query(self.mix_AB)
loss_D = self.backward_D_basic(self.netD, self.real_C, mix_AB)
self.loss_D = loss_D.item()
def backward_G(self):
# GAN loss D(G(concat_AB))
reflection = self.real_A
transmission = self.real_B
concat_AB = torch.cat((transmission, reflection), dim=1)
W = self.netG(concat_AB)
W_revise = self.One - W
mix_AB = W_revise * transmission + W * reflection
pred_fake = self.netD(mix_AB)
loss_GAN = self.criterionGAN(pred_fake, True)
# for smoothness loss
smooth_y_W = self.criterionL2(W[:, :, 1:, :], W.detach()[:, :, :-1, :])
smooth_x_W = self.criterionL2(W[:, :, :, 1:], W.detach()[:, :, :, :-1])
loss_Smooth_W = smooth_y_W + smooth_x_W
loss_G = loss_GAN + loss_Smooth_W * 10
loss_G.backward()
self.mix_AB = mix_AB.data
self.reflection = reflection.data
self.transmission = transmission.data
self.W = W.data
self.W_revise = W_revise.data
self.loss_GAN = loss_GAN.item()
self.loss_Smooth_W = loss_Smooth_W.item()
def optimize_parameters(self):
# forward
self.forward()
# G
self.optimizer_G.zero_grad()
self.backward_G()
self.optimizer_G.step()
# D
self.optimizer_D.zero_grad()
self.backward_D()
self.optimizer_D.step()
def get_current_errors(self):
ret_errors = OrderedDict([('loss_GAN', self.loss_GAN),
('loss_Smooth_W', self.loss_Smooth_W),
('loss_D', self.loss_D)])
return ret_errors
def get_current_visuals_train(self):
reflection = util.tensor2im(self.reflection)
transmission = util.tensor2im(self.transmission)
real_C = util.tensor2im(self.input_C)
mix_AB = util.tensor2im(self.mix_AB)
ret_visuals = OrderedDict([('reflection', reflection),('transmission', transmission),
('real_C', real_C), ('mix_AB', mix_AB)])
return ret_visuals
def get_current_visuals_test(self):
real_A = util.tensor2im(self.real_A)
real_A_origin = util.tensor2im(self.real_A_origin)
real_B = util.tensor2im(self.real_B)
mix_AB = util.tensor2im(self.mix_AB)
ret_visuals = OrderedDict([('reflection', real_A), ('transmission', real_B), ('reflection_origin', real_A_origin),
('mix_AB', mix_AB)])
return ret_visuals
def save(self, label):
self.save_network(self.netG, 'G', label, self.gpu_ids)
self.save_network(self.netD, 'D', label, self.gpu_ids)
| 37.411458
| 129
| 0.59251
|
189a4234687e145bda60a4f29eedcbb9789951be
| 2,212
|
py
|
Python
|
tests/test_reading.py
|
jsreyl/nmrstarlib
|
e601859760966f853fd5d07af454a9c9cb75dbeb
|
[
"MIT"
] | 5
|
2017-03-17T17:25:05.000Z
|
2022-01-19T14:23:30.000Z
|
tests/test_reading.py
|
jsreyl/nmrstarlib
|
e601859760966f853fd5d07af454a9c9cb75dbeb
|
[
"MIT"
] | 8
|
2016-11-09T07:46:28.000Z
|
2021-06-21T04:03:31.000Z
|
tests/test_reading.py
|
jsreyl/nmrstarlib
|
e601859760966f853fd5d07af454a9c9cb75dbeb
|
[
"MIT"
] | 2
|
2016-08-25T17:51:18.000Z
|
2021-05-21T21:51:39.000Z
|
import pytest
import nmrstarlib
@pytest.mark.parametrize("source", [
("tests/example_data/NMRSTAR3/bmr18569.str",
"tests/example_data/NMRSTAR2/bmr18569.str",
"tests/example_data/CIF/2rpv.cif")
])
def test_from_local_file(source):
starfile_generator = nmrstarlib.read_files(*source)
starfiles_list = list(starfile_generator)
starfiles_ids_set = set(sf.id for sf in starfiles_list)
assert starfiles_ids_set.issubset({"18569", "2RPV"})
@pytest.mark.parametrize("source", [
("15000",
"18569")
])
def test_from_bmrbid(source):
starfile_generator = nmrstarlib.read_files(*source)
starfile1 = next(starfile_generator)
starfile2 = next(starfile_generator)
assert starfile1.id in ("15000", "18569") and starfile2.id in ("15000", "18569")
@pytest.mark.parametrize("source", [
("http://rest.bmrb.wisc.edu/bmrb/NMR-STAR3/15000",
"http://rest.bmrb.wisc.edu/bmrb/NMR-STAR3/18569",
"https://files.rcsb.org/view/2rpv.cif")
])
def test_from_url(source):
starfile_generator = nmrstarlib.read_files(*source)
starfile1 = next(starfile_generator)
starfile2 = next(starfile_generator)
assert starfile1.id in ("15000", "18569") and starfile2.id in ("15000", "18569")
@pytest.mark.parametrize("source", [
"tests/example_data/NMRSTAR3/starfiles_directory",
"tests/example_data/NMRSTAR2/starfiles_directory",
"tests/example_data/CIF/ciffiles_directory",
"tests/example_data/NMRSTAR3/starfiles_archive.zip",
"tests/example_data/NMRSTAR2/starfiles_archive.zip",
"tests/example_data/CIF/ciffiles_archive.zip",
"tests/example_data/NMRSTAR3/starfiles_archive.tar.gz",
"tests/example_data/NMRSTAR2/starfiles_archive.tar.gz",
"tests/example_data/CIF/ciffiles_archive.tar.gz",
"tests/example_data/NMRSTAR3/starfiles_archive.tar.bz2",
"tests/example_data/NMRSTAR2/starfiles_archive.tar.bz2",
"tests/example_data/CIF/ciffiles_archive.tar.bz2"
])
def test_reading(source):
starfile_generator = nmrstarlib.read_files(source)
starfiles_list = list(starfile_generator)
starfiles_ids_set = set(sf.id for sf in starfiles_list)
assert starfiles_ids_set.issubset({"15000", "18569", "2RPV", "2FRG"})
| 37.491525
| 84
| 0.737342
|
6dc36c74f54359a1c1c8d79bafcd69db6bbd6599
| 1,916
|
py
|
Python
|
testes.py
|
analytics-ufcg/leggo-trends
|
85a3cf03a23f7277af4c4ae90a9ba174401f1df7
|
[
"MIT"
] | null | null | null |
testes.py
|
analytics-ufcg/leggo-trends
|
85a3cf03a23f7277af4c4ae90a9ba174401f1df7
|
[
"MIT"
] | 10
|
2020-04-15T11:30:28.000Z
|
2022-02-18T15:06:11.000Z
|
testes.py
|
parlametria/leggoTrends
|
ac46dde3496bcda6100d3be44412a890e3d27ecf
|
[
"MIT"
] | 1
|
2019-08-06T13:41:19.000Z
|
2019-08-06T13:41:19.000Z
|
# -*- coding: utf-8 -*-
from fetch_google_trends import *
from datetime import date, datetime
from datetime import timedelta
import pandas as pd
def test_get_data_inicial():
assert get_data_inicial('2019-08-22') == '2019-08-22', 'Deveria ser 2019-08-22'
assert get_data_inicial('2015-08-22') == (date.today() - timedelta(days=180)).strftime('%Y-%m-%d'), 'Deveria ser a data de 6 meses atrás'
def test_formata_timeframe():
assert formata_timeframe('2019-03-10') == '2019-03-10' + ' ' + date.today().strftime('%Y-%m-%d'), 'Deveria ser a data de 6 meses atrás até hoje'
def test_calcula_maximo():
lista_com_dados_teste = [
[16526, '2019-02-18', 'camara', 0, 0, 0, False],
[16526, '2019-02-18', 'camara', 100, 0, 20, False],
[16526, '2019-02-18', 'camara', 0, 100, 30, False],
[16526, '2019-02-18', 'camara', 45, 46, 100, False],
[16526, '2019-02-25', 'camara', 46, 45, 45, False]
]
lista_com_dados_gabarito = [
[16526, '2019-02-18', 'camara', 0, 0, 0, False, 0, 0, 0.0],
[16526, '2019-02-18', 'camara', 100, 0, 20, False, 100, 20, 100.0],
[16526, '2019-02-18', 'camara', 0, 100, 30, False, 100, 30, 100.0],
[16526, '2019-02-18', 'camara', 45, 46, 100, False, 46, 100, 100.0],
[16526, '2019-02-25', 'camara', 46, 45, 45, False, 46, 45, 46]
]
df_teste = pd.DataFrame(lista_com_dados_teste, columns = ['id_ext', 'date', 'casa', 'pl', 'apelido', 'rel', 'isPartial'])
df_gabarito = pd.DataFrame(lista_com_dados_gabarito, columns = ['id_ext', 'date', 'casa', 'pl', 'apelido', 'rel', 'isPartial', 'max_pressao_principal', 'max_pressao_rel', 'maximo_geral'])
assert df_gabarito.equals(calcula_maximos(df_gabarito, 'apelido', 'pl')), 'Máximos diferentes'
if __name__ == "__main__":
test_get_data_inicial()
test_formata_timeframe()
test_calcula_maximo()
print('Parabéns! Tudo bacana!')
| 47.9
| 191
| 0.626827
|
07d4a95d54dd20cfd50590d3fac411e2686c0df5
| 840
|
py
|
Python
|
setup.py
|
bernd-clemenz/aloga
|
ea932aa1842eb4752408331257d50ab89f20de12
|
[
"MIT"
] | null | null | null |
setup.py
|
bernd-clemenz/aloga
|
ea932aa1842eb4752408331257d50ab89f20de12
|
[
"MIT"
] | null | null | null |
setup.py
|
bernd-clemenz/aloga
|
ea932aa1842eb4752408331257d50ab89f20de12
|
[
"MIT"
] | null | null | null |
#
# Access-Log file analysis.
# (c) ISC Clemenz & Weinbrecht GmbH 2018
#
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="aloga",
version="0.0.6",
author="ISC Clemenz & Weinbrecht GmbH",
author_email="info@isc-software.de",
description="Access-log file analysis",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/bernd-clemenz/aloga",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=(
"argparse",
"antlr4-python3-runtime",
"requests",
"numpy",
"matplotlib"
),
)
| 25.454545
| 50
| 0.633333
|
2781fd17d2bdc80ebb0024ee3876a221ea0c8145
| 24,517
|
py
|
Python
|
lib/ppymilterbase.py
|
vimmaniac/ppymilter
|
8e5f6d2cddb14d5e8f870be98ff96623e736033b
|
[
"Apache-2.0"
] | null | null | null |
lib/ppymilterbase.py
|
vimmaniac/ppymilter
|
8e5f6d2cddb14d5e8f870be98ff96623e736033b
|
[
"Apache-2.0"
] | null | null | null |
lib/ppymilterbase.py
|
vimmaniac/ppymilter
|
8e5f6d2cddb14d5e8f870be98ff96623e736033b
|
[
"Apache-2.0"
] | null | null | null |
# $Id$
# ==============================================================================
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Pure python milter interface (does not use libmilter.a).
# Handles parsing of milter protocol data (e.g. over a network socket)
# and provides standard arguments to the callbacks in your handler class.
#
# For details of the milter protocol see:
# http://search.cpan.org/src/AVAR/Sendmail-PMilter-0.96/doc/milter-protocol.txt
#
__author__ = 'Eric DeFriez'
import binascii
import logging
import os
import socket
import struct
import sys
import types
MILTER_VERSION = 2 # Milter version we claim to speak (from pmilter)
# Potential milter command codes and their corresponding PpyMilter callbacks.
# From sendmail's include/libmilter/mfdef.h
SMFIC_ABORT = 'A' # "Abort"
SMFIC_BODY = 'B' # "Body chunk"
SMFIC_CONNECT = 'C' # "Connection information"
SMFIC_MACRO = 'D' # "Define macro"
SMFIC_BODYEOB = 'E' # "final body chunk (End)"
SMFIC_HELO = 'H' # "HELO/EHLO"
SMFIC_HEADER = 'L' # "Header"
SMFIC_MAIL = 'M' # "MAIL from"
SMFIC_EOH = 'N' # "EOH"
SMFIC_OPTNEG  = 'O' # "Option negotiation"
SMFIC_RCPT = 'R' # "RCPT to"
SMFIC_QUIT = 'Q' # "QUIT"
SMFIC_DATA = 'T' # "DATA"
SMFIC_UNKNOWN = 'U' # "Any unknown command"
COMMANDS = {
SMFIC_ABORT: 'Abort',
SMFIC_BODY: 'Body',
SMFIC_CONNECT: 'Connect',
SMFIC_MACRO: 'Macro',
SMFIC_BODYEOB: 'EndBody',
SMFIC_HELO: 'Helo',
SMFIC_HEADER: 'Header',
SMFIC_MAIL: 'MailFrom',
SMFIC_EOH: 'EndHeaders',
SMFIC_OPTNEG: 'OptNeg',
SMFIC_RCPT: 'RcptTo',
SMFIC_QUIT: 'Quit',
SMFIC_DATA: 'Data',
SMFIC_UNKNOWN: 'Unknown',
}
# To register/mask callbacks during milter protocol negotiation with sendmail.
# From sendmail's include/libmilter/mfdef.h
NO_CALLBACKS = 127 # (all seven callback flags set: 1111111)
CALLBACKS = {
'OnConnect': 1, # 0x01 SMFIP_NOCONNECT # Skip SMFIC_CONNECT
'OnHelo': 2, # 0x02 SMFIP_NOHELO # Skip SMFIC_HELO
'OnMailFrom': 4, # 0x04 SMFIP_NOMAIL # Skip SMFIC_MAIL
'OnRcptTo': 8, # 0x08 SMFIP_NORCPT # Skip SMFIC_RCPT
'OnBody': 16, # 0x10 SMFIP_NOBODY # Skip SMFIC_BODY
'OnHeader': 32, # 0x20 SMFIP_NOHDRS # Skip SMFIC_HEADER
'OnEndHeaders': 64, # 0x40 SMFIP_NOEOH # Skip SMFIC_EOH
}
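# Worked example of the masking above (illustrative): a milter that defines only
# OnHeader and OnEndHeaders clears just those two "skip" bits, so it negotiates
#   NO_CALLBACKS & ~(CALLBACKS['OnHeader'] | CALLBACKS['OnEndHeaders'])
#   = 127 & ~(32 | 64) = 31
# which skips the connect/helo/mail/rcpt/body callbacks while header and
# end-of-headers events are still delivered.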
# Acceptable response commands/codes to return to sendmail (with accompanying
# command data). From sendmail's include/libmilter/mfdef.h
RESPONSE = {
'ADDRCPT' : '+', # SMFIR_ADDRCPT # "add recipient"
'DELRCPT' : '-', # SMFIR_DELRCPT # "remove recipient"
'ACCEPT' : 'a', # SMFIR_ACCEPT # "accept"
'REPLBODY' : 'b', # SMFIR_REPLBODY # "replace body (chunk)"
'CONTINUE' : 'c', # SMFIR_CONTINUE # "continue"
'DISCARD' : 'd', # SMFIR_DISCARD # "discard"
'CONNFAIL' : 'f', # SMFIR_CONN_FAIL # "cause a connection failure"
'ADDHEADER' : 'h', # SMFIR_ADDHEADER # "add header"
'INSHEADER' : 'i', # SMFIR_INSHEADER # "insert header"
'CHGHEADER' : 'm', # SMFIR_CHGHEADER # "change header"
'PROGRESS' : 'p', # SMFIR_PROGRESS # "progress"
'QUARANTINE' : 'q', # SMFIR_QUARANTINE # "quarantine"
'REJECT' : 'r', # SMFIR_REJECT # "reject"
'SETSENDER' : 's', # v3 only?
'TEMPFAIL' : 't', # SMFIR_TEMPFAIL # "tempfail"
'REPLYCODE' : 'y', # SMFIR_REPLYCODE # "reply code etc"
}
def printchar(char):
"""Useful debugging function for milter developers."""
print ('char: %s [qp=%s][hex=%s][base64=%s]' %
(char, binascii.b2a_qp(char), binascii.b2a_hex(char),
binascii.b2a_base64(char)))
def CanonicalizeAddress(addr):
"""Strip angle brackes from email address iff not an empty address ("<>").
Args:
addr: the email address to canonicalize (strip angle brackets from).
Returns:
The addr with leading and trailing angle brackets removed unless
the address is "<>" (in which case the string is returned unchanged).
"""
if addr == '<>': return addr
return addr.lstrip('<').rstrip('>')
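# Worked examples: CanonicalizeAddress('<user@example.com>') -> 'user@example.com',
# while CanonicalizeAddress('<>') is returned unchanged.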
class PpyMilterException(Exception):
"""Parent of all other PpyMilter exceptions. Subclass this: do not
construct or catch explicitly!"""
class PpyMilterPermFailure(PpyMilterException):
"""Milter exception that indicates a perment failure."""
class PpyMilterTempFailure(PpyMilterException):
"""Milter exception that indicates a temporary/transient failure."""
class PpyMilterCloseConnection(PpyMilterException):
"""Exception that indicates the server should close the milter connection."""
class PpyMilterActionError(PpyMilterException):
"""Exception raised when an action is performed that was not negotiated."""
class PpyMilterDispatcher(object):
"""Dispatcher class for a milter server. This class accepts entire
milter commands as a string (command character + binary data), parses
the command and binary data appropriately and invokes the appropriate
callback function in a milter_class instance. One PpyMilterDispatcher
per socket connection. One milter_class instance per PpyMilterDispatcher
(per socket connection)."""
def __init__(self, milter_class):
"""Construct a PpyMilterDispatcher and create a private
milter_class instance.
Args:
milter_class: A class (not an instance) that handles callbacks for
milter commands (e.g. a child of the PpyMilter class).
"""
self.__milter = milter_class()
def Dispatch(self, data):
"""Callback function for the milter socket server to handle a single
milter command. Parses the milter command data, invokes the milter
handler, and formats a suitable response for the server to send
on the socket.
Args:
data: A (binary) string (consisting of a command code character
followed by binary data for that command code).
Returns:
A binary string to write on the socket and return to sendmail. The
string typically consists of a RESPONSE[] command character then
some response-specific protocol data.
Raises:
PpyMilterCloseConnection: Indicating the (milter) connection should
be closed.
"""
(cmd, data) = (data[0], data[1:])
try:
if cmd not in COMMANDS:
logging.warn('Unknown command code: "%s" ("%s")', cmd, data)
return RESPONSE['CONTINUE']
command = COMMANDS[cmd]
parser_callback_name = '_Parse%s' % command
handler_callback_name = 'On%s' % command
if not hasattr(self, parser_callback_name):
logging.error('No parser implemented for "%s"', command)
return RESPONSE['CONTINUE']
if not hasattr(self.__milter, handler_callback_name):
logging.warn('Unimplemented command: "%s" ("%s")', command, data)
return RESPONSE['CONTINUE']
parser = getattr(self, parser_callback_name)
callback = getattr(self.__milter, handler_callback_name)
args = parser(cmd, data)
return callback(*args)
    except PpyMilterTempFailure as e:
logging.info('Temp Failure: %s', str(e))
return RESPONSE['TEMPFAIL']
    except PpyMilterPermFailure as e:
logging.info('Perm Failure: %s', str(e))
return RESPONSE['REJECT']
return RESPONSE['CONTINUE']
def _ParseOptNeg(self, cmd, data):
"""Parse the 'OptNeg' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple consisting of:
cmd: The single character command code representing this command.
ver: The protocol version we support.
actions: Bitmask of the milter actions we may perform
(see "PpyMilter.ACTION_*").
protocol: Bitmask of the callback functions we are registering.
"""
(ver, actions, protocol) = struct.unpack('!III', data)
return (cmd, ver, actions, protocol)
def _ParseMacro(self, cmd, data):
"""Parse the 'Macro' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple consisting of:
cmd: The single character command code representing this command.
macro: The single character command code this macro is for.
data: A list of strings alternating between name, value of macro.
"""
(macro, data) = (data[0], data[1:])
return (cmd, macro, data.split('\0'))
def _ParseConnect(self, cmd, data):
"""Parse the 'Connect' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd, hostname, family, port, address) where:
cmd: The single character command code representing this command.
hostname: The hostname that originated the connection to the MTA.
family: Address family for connection (see sendmail libmilter/mfdef.h).
port: The network port if appropriate for the connection.
address: Remote address of the connection (e.g. IP address).
"""
(hostname, data) = data.split('\0', 1)
family = struct.unpack('c', data[0])[0]
port = struct.unpack('!H', data[1:3])[0]
address = data[3:]
return (cmd, hostname, family, port, address)
def _ParseHelo(self, cmd, data):
"""Parse the 'Helo' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd, data) where:
cmd: The single character command code representing this command.
data: TODO: parse this better
"""
return (cmd, data)
def _ParseMailFrom(self, cmd, data):
"""Parse the 'MailFrom' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd, mailfrom, esmtp_info) where:
cmd: The single character command code representing this command.
mailfrom: The canonicalized MAIL From email address.
esmtp_info: Extended SMTP (esmtp) info as a list of strings.
"""
(mailfrom, esmtp_info) = data.split('\0', 1)
return (cmd, CanonicalizeAddress(mailfrom), esmtp_info.split('\0'))
def _ParseRcptTo(self, cmd, data):
"""Parse the 'RcptTo' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd, rcptto, emstp_info) where:
cmd: The single character command code representing this command.
rcptto: The canonicalized RCPT To email address.
esmtp_info: Extended SMTP (esmtp) info as a list of strings.
"""
(rcptto, esmtp_info) = data.split('\0', 1)
return (cmd, CanonicalizeAddress(rcptto), esmtp_info.split('\0'))
def _ParseHeader(self, cmd, data):
"""Parse the 'Header' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd, key, val) where:
cmd: The single character command code representing this command.
key: The name of the header.
val: The value/data for the header.
"""
(key, val) = data.split('\0', 1)
return (cmd, key, val)
def _ParseEndHeaders(self, cmd, data):
"""Parse the 'EndHeaders' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd) where:
cmd: The single character command code representing this command.
"""
return (cmd)
def _ParseBody(self, cmd, data):
"""Parse the 'Body' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd, data) where:
cmd : The single character command code representing this command.
data: TODO: parse this better
"""
return (cmd, data)
def _ParseEndBody(self, cmd, data):
"""Parse the 'EndBody' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: No data is sent for this command.
Returns:
A tuple (cmd) where:
cmd: The single character command code representing this command.
"""
return (cmd)
def _ParseQuit(self, cmd, data):
"""Parse the 'Quit' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd) where:
cmd: The single character command code representing this command.
"""
return (cmd)
def _ParseAbort(self, cmd, data):
"""Parse the 'Abort' milter data into arguments for the milter handler.
Args:
cmd: A single character command code representing this command.
data: Command-specific milter data to be unpacked/parsed.
Returns:
A tuple (cmd) where:
cmd: The single character command code representing this command.
"""
return (cmd)
class PpyMilter(object):
"""Pure python milter handler base class. Inherit from this class
and override any On*() commands you would like your milter to handle.
Register any actions your milter may perform using the Can*() functions
  during your __init__() (after calling PpyMilter.__init__()!) to ensure
your milter's actions are accepted.
Pass a reference to your handler class to a python milter socket server
(e.g. AsyncPpyMilterServer) to create a stand-alone milter
  process that invokes your custom handler.
"""
# Actions we tell sendmail we may perform
# PpyMilter users invoke self.CanFoo() during their __init__()
# to toggle these settings.
ACTION_ADDHDRS = 1 # 0x01 SMFIF_ADDHDRS # Add headers
ACTION_CHGBODY = 2 # 0x02 SMFIF_CHGBODY # Change body chunks
ACTION_ADDRCPT = 4 # 0x04 SMFIF_ADDRCPT # Add recipients
ACTION_DELRCPT = 8 # 0x08 SMFIF_DELRCPT # Remove recipients
ACTION_CHGHDRS = 16 # 0x10 SMFIF_CHGHDRS # Change or delete headers
ACTION_QUARANTINE = 32 # 0x20 SMFIF_QUARANTINE # Quarantine message
def __init__(self):
"""Construct a PpyMilter object. Sets callbacks and registers
callbacks. Make sure you call this directly "PpyMilter.__init__(self)"
at the beginning of your __init__() if you override the class constructor!
"""
self.__actions = 0
self.__protocol = NO_CALLBACKS
    for (callback, flag) in CALLBACKS.items():
if hasattr(self, callback):
self.__protocol &= ~flag
def Accept(self):
"""Create an 'ACCEPT' response to return to the milter dispatcher."""
return RESPONSE['ACCEPT']
def Reject(self):
"""Create a 'REJECT' response to return to the milter dispatcher."""
return RESPONSE['REJECT']
def Discard(self):
"""Create a 'DISCARD' response to return to the milter dispatcher."""
return RESPONSE['DISCARD']
def TempFail(self):
"""Create a 'TEMPFAIL' response to return to the milter dispatcher."""
return RESPONSE['TEMPFAIL']
def Continue(self):
"""Create an '' response to return to the milter dispatcher."""
return RESPONSE['CONTINUE']
def CustomReply(self, code, text):
"""Create a 'REPLYCODE' (custom) response to return to the milter
dispatcher.
Args:
code: Integer or digit string (should be \d\d\d). NOTICE: A '421' reply
code will cause sendmail to close the connection after responding!
(https://www.sendmail.org/releases/8.13.0.html)
      text: Code reason/explanation to send to the user.
"""
return '%s%s %s\0' % (RESPONSE['REPLYCODE'], code, text)
def AddRecipient(self, rcpt):
"""Construct an ADDRCPT reply that the client can send during OnEndBody.
Args:
rcpt: The recipient to add, should have <> around it.
"""
self.__VerifyCapability(self.ACTION_ADDRCPT)
return '%s%s\0' % (RESPONSE['ADDRCPT'], rcpt)
def AddHeader(self, name, value):
"""Construct an ADDHEADER reply that the client can send during OnEndBody.
Args:
name: The name of the header to add
value: The value of the header
"""
self.__VerifyCapability(self.ACTION_ADDHDRS)
return '%s%s\0%s\0' % (RESPONSE['ADDHEADER'], name, value)
def DeleteRecipient(self, rcpt):
"""Construct an DELRCPT reply that the client can send during OnEndBody.
Args:
rcpt: The recipient to delete, should have <> around it.
"""
self.__VerifyCapability(self.ACTION_DELRCPT)
return '%s%s\0' % (RESPONSE['DELRCPT'], rcpt)
def InsertHeader(self, index, name, value):
"""Construct an INSHEADER reply that the client can send during OnEndBody.
Args:
index: The index to insert the header at. 0 is above all headers.
A number greater than the number of headers just appends.
name: The name of the header to insert.
value: The value to insert.
"""
self.__VerifyCapability(self.ACTION_ADDHDRS)
index = struct.pack('!I', index)
return '%s%s%s\0%s\0' % (RESPONSE['INSHEADER'], index, name, value)
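    # For example (illustrative, and assuming CanAddHeaders() was called in
    # __init__): InsertHeader(0, 'X-Scanned', 'yes') returns
    # 'i' + '\x00\x00\x00\x00' + 'X-Scanned\x00yes\x00', i.e. the INSHEADER code,
    # a big-endian index of 0, then the NUL-terminated name and value.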
def ChangeHeader(self, index, name, value):
"""Construct a CHGHEADER reply that the client can send during OnEndBody.
Args:
index: The index of the header to change, offset from 1.
             The offset is per-occurrence of this header, not of all headers.
A value of '' (empty string) will cause the header to be deleted.
name: The name of the header to insert.
value: The value to insert.
"""
self.__VerifyCapability(self.ACTION_CHGHDRS)
index = struct.pack('!I', index)
return '%s%s%s\0%s\0' % (RESPONSE['CHGHEADER'], index, name, value)
def ReturnOnEndBodyActions(self, actions):
"""Construct an OnEndBody response that can consist of multiple actions
followed by a final required Continue().
All message mutations (all adds/changes/deletes to envelope/header/body)
must be sent as response to the OnEndBody callback. Multiple actions
are allowed. This function formats those multiple actions into one
response to return back to the PpyMilterDispatcher.
For example to make sure all recipients are in 'To' headers:
+---------------------------------------------------------------------
| class NoBccMilter(PpyMilterBase):
| def __init__(self):
| self.__mutations = []
| ...
| def OnRcptTo(self, cmd, rcpt_to, esmtp_info):
| self.__mutations.append(self.AddHeader('To', rcpt_to))
| return self.Continue()
| def OnEndBody(self, cmd):
| tmp = self.__mutations
| self.__mutations = []
| return self.ReturnOnEndBodyActions(tmp)
| def OnResetState(self):
| self.__mutations = []
+---------------------------------------------------------------------
Args:
actions: List of "actions" to perform on the message.
For example:
actions=[AddHeader('Cc', 'lurker@example.com'),
AddRecipient('lurker@example.com')]
"""
return actions[:] + [self.Continue()]
def __ResetState(self):
"""Clear out any per-message data.
Milter connections correspond to SMTP connections, and many messages may be
sent in the same SMTP conversation. Any data stored that pertains to the
message that was just handled should be cleared so that it doesn't affect
processing of the next message. This method also implements an
'OnResetState' callback that milters can use to catch this situation too.
"""
try:
self.OnResetState()
except AttributeError:
logging.warn('No OnResetState() callback is defined for this milter.')
# you probably should not be overriding this :-p
def OnOptNeg(self, cmd, ver, actions, protocol):
"""Callback for the 'OptNeg' (option negotiation) milter command.
Shouldn't be necessary to override (don't do it unless you
know what you're doing).
    Option negotiation is based on:
(1) Command callback functions defined by your handler class.
(2) Stated actions your milter may perform by invoking the
"self.CanFoo()" functions during your milter's __init__().
"""
out = struct.pack('!III', MILTER_VERSION,
self.__actions & actions,
self.__protocol & protocol)
return cmd+out
def OnMacro(self, cmd, macro_cmd, data):
"""Callback for the 'Macro' milter command: no response required."""
return None
def OnQuit(self, cmd):
"""Callback for the 'Quit' milter command: close the milter connection.
The only logical response is to ultimately raise a
PpyMilterCloseConnection() exception.
"""
raise PpyMilterCloseConnection('received quit command')
def OnAbort(self, cmd):
"""Callback for the 'Abort' milter command.
This callback is required because per-message data must be cleared when an
Abort command is received. Otherwise any message modifications will end up
being applied to the next message that is sent down the same SMTP
connection.
Args:
cmd: Unused argument.
Returns:
A Continue response so that further messages in this SMTP conversation
will be processed.
"""
self.__ResetState()
return self.Continue()
def OnEndBody(self, cmd):
"""Callback for the 'EndBody' milter command.
If your milter wants to do any message mutations (add/change/delete any
envelope/header/body information) it needs to happen as a response to
this callback (so need to override this function and cause those
actions by returning using ReturnOnEndBodyActions() above).
Args:
cmd: Unused argument.
Returns:
A continue response so that further messages in this SMTP conversation
will be processed.
"""
return self.Continue()
# Call these from __init__() (after calling PpyMilter.__init__() :-p
# to tell sendmail you may perform these actions
# (otherwise performing the actions may fail).
def CanAddHeaders(self):
"""Register that our milter may perform the action 'ADDHDRS'."""
self.__actions |= self.ACTION_ADDHDRS
def CanChangeBody(self):
"""Register that our milter may perform the action 'CHGBODY'."""
self.__actions |= self.ACTION_CHGBODY
def CanAddRecipient(self):
"""Register that our milter may perform the action 'ADDRCPT'."""
self.__actions |= self.ACTION_ADDRCPT
def CanDeleteRecipient(self):
"""Register that our milter may perform the action 'DELRCPT'."""
self.__actions |= self.ACTION_DELRCPT
def CanChangeHeaders(self):
"""Register that our milter may perform the action 'CHGHDRS'."""
self.__actions |= self.ACTION_CHGHDRS
def CanQuarantine(self):
"""Register that our milter may perform the action 'QUARANTINE'."""
self.__actions |= self.ACTION_QUARANTINE
def __VerifyCapability(self, action):
if not (self.__actions & action):
logging.error('Error: Attempted to perform an action that was not' +
'requested.')
raise PpyMilterActionError('Action not requested in __init__')
| 36.867669
| 80
| 0.671942
|
8bf0f21d0c96e266abc21f3144d7a1ca12440244
| 3,981
|
py
|
Python
|
validator/sawtooth_validator/networking/future.py
|
Capco-JayPanicker123/Sawtooth
|
d22a16a6a82da5627ff113a1a5290f83b3f82c45
|
[
"Apache-2.0"
] | 2
|
2018-09-27T04:43:33.000Z
|
2019-10-23T14:32:31.000Z
|
validator/sawtooth_validator/networking/future.py
|
Capco-JayPanicker123/Sawtooth
|
d22a16a6a82da5627ff113a1a5290f83b3f82c45
|
[
"Apache-2.0"
] | 10
|
2020-05-12T06:58:15.000Z
|
2022-02-26T23:59:35.000Z
|
validator/sawtooth_validator/networking/future.py
|
Capco-JayPanicker123/Sawtooth
|
d22a16a6a82da5627ff113a1a5290f83b3f82c45
|
[
"Apache-2.0"
] | 1
|
2021-03-09T03:36:36.000Z
|
2021-03-09T03:36:36.000Z
|
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import logging
from threading import Condition
from threading import RLock
import time
LOGGER = logging.getLogger(__name__)
class FutureResult:
def __init__(self, message_type, content, connection_id=None):
self.message_type = message_type
self.content = content
self.connection_id = connection_id
class FutureTimeoutError(Exception):
pass
class Future:
def __init__(self, correlation_id, request=None, callback=None,
timer_ctx=None):
self.correlation_id = correlation_id
self._request = request
self._result = None
self._condition = Condition()
self._create_time = time.time()
self._callback_func = callback
self._reconcile_time = None
self._timer_ctx = timer_ctx
def done(self):
return self._result is not None
@property
def request(self):
return self._request
def result(self, timeout=None):
with self._condition:
if self._result is None:
if not self._condition.wait(timeout):
raise FutureTimeoutError('Future timed out')
return self._result
def set_result(self, result):
with self._condition:
self._reconcile_time = time.time()
self._result = result
self._condition.notify()
def run_callback(self):
"""Calls the callback_func, passing in the two positional arguments,
conditionally waiting if the callback function hasn't been set yet.
Meant to be run in a threadpool owned by the FutureCollection.
Returns:
None
"""
if self._callback_func is not None:
try:
self._callback_func(self._request, self._result)
except Exception: # pylint: disable=broad-except
LOGGER.exception('An unhandled error occurred while running '
'future callback')
def get_duration(self):
return self._reconcile_time - self._create_time
def timer_stop(self):
if self._timer_ctx:
self._timer_ctx.stop()
self._timer_ctx = None
class FutureCollectionKeyError(Exception):
pass
class FutureCollection:
def __init__(self, resolving_threadpool=None):
self._futures = {}
self._lock = RLock()
self._resolving_threadpool = resolving_threadpool
def put(self, future):
self._futures[future.correlation_id] = future
def set_result(self, correlation_id, result):
with self._lock:
future = self.get(correlation_id)
future.set_result(result)
if self._resolving_threadpool is not None:
self._resolving_threadpool.submit(future.run_callback)
else:
future.run_callback()
def get(self, correlation_id):
try:
return self._futures[correlation_id]
except KeyError:
raise FutureCollectionKeyError(
"no such correlation id: {}".format(correlation_id))
def remove(self, correlation_id):
try:
del self._futures[correlation_id]
except KeyError:
raise FutureCollectionKeyError(
"no such correlation id: {}".format(correlation_id))
| 31.101563
| 80
| 0.63577
|
3dff6a44a7c954f3fffae69ca65e162f8b40860d
| 1,423
|
py
|
Python
|
compuG/cargarFondo.py
|
alejoso76/Computaci-n-gr-fica
|
474a498a328b8951aa0bfa1db2d0d1f3d8cc914b
|
[
"MIT"
] | null | null | null |
compuG/cargarFondo.py
|
alejoso76/Computaci-n-gr-fica
|
474a498a328b8951aa0bfa1db2d0d1f3d8cc914b
|
[
"MIT"
] | null | null | null |
compuG/cargarFondo.py
|
alejoso76/Computaci-n-gr-fica
|
474a498a328b8951aa0bfa1db2d0d1f3d8cc914b
|
[
"MIT"
] | null | null | null |
import pygame
import math
ANCHO=640
ALTO=480
def mostrarPos():
pos=pygame.mouse.get_pos()
return pos
if __name__ == '__main__':
pygame.init()
pantalla=pygame.display.set_mode([ANCHO, ALTO]) #Crea la ventana
#Carga la imagen a una variable
fondo=pygame.image.load('fondo.png')
infoFondo=fondo.get_rect()
    print('Funciona')
fin=False
var_x=-2
var_y=-2
pos_x=0
pos_y=0
pantalla.blit(fondo, [pos_x, pos_y])
pygame.display.flip()
reloj=pygame.time.Clock()
    print(infoFondo)
while not fin:
pos=mostrarPos()
if pos[0]>590 and pos_x>=-1*(infoFondo[2]-ANCHO):
pantalla.blit(fondo, [pos_x, pos_y])
pygame.display.flip()
reloj.tick(10)
pos_x+=var_x
if pos[0]<50 and pos_x!=0:
pantalla.blit(fondo, [pos_x, pos_y])
pygame.display.flip()
reloj.tick(10)
pos_x-=var_x
if pos[1]>430 and pos_y>=-1*(infoFondo[3]-ALTO):
pantalla.blit(fondo, [pos_x, pos_y])
pygame.display.flip()
reloj.tick(10)
pos_y+=var_y
if pos[1]<50 and pos_y!=0:
pantalla.blit(fondo, [pos_x, pos_y])
pygame.display.flip()
reloj.tick(10)
pos_y-=var_y
for event in pygame.event.get():
if event.type == pygame.QUIT:
fin=True
| 23.327869
| 69
| 0.556571
|
2ae9dc0bb27a8544b6017e08a831118138a9a54d
| 7,189
|
py
|
Python
|
library/panos_bgp_redistribute.py
|
rvichery/ansible-pan
|
d07839cd5a544a6398646c01e1edac0f0f82cc38
|
[
"Apache-2.0"
] | null | null | null |
library/panos_bgp_redistribute.py
|
rvichery/ansible-pan
|
d07839cd5a544a6398646c01e1edac0f0f82cc38
|
[
"Apache-2.0"
] | null | null | null |
library/panos_bgp_redistribute.py
|
rvichery/ansible-pan
|
d07839cd5a544a6398646c01e1edac0f0f82cc38
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Copyright 2018 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_bgp_redistribute
short_description: Configures a BGP Redistribution Rule
description:
- Use BGP to publish and consume routes from disparate networks.
author: "Joshua Colson (@freakinhippie)"
version_added: "2.8"
requirements:
- pan-python can be obtained from PyPI U(https://pypi.python.org/pypi/pan-python)
- pandevice can be obtained from PyPI U(https://pypi.python.org/pypi/pandevice)
notes:
- Checkmode is supported.
- Panorama is supported.
extends_documentation_fragment:
- panos.transitional_provider
- panos.state
- panos.full_template_support
options:
commit:
description:
- Commit configuration if changed.
default: True
type: bool
address_family_identifier:
description:
- Address Family Identifier.
choices:
- ipv4
- ipv6
default: 'ipv4'
enable:
description:
- Enable rule.
default: True
type: bool
metric:
description:
- Metric value.
type: int
name:
description:
- An IPv4 subnet or a defined Redistribution Profile in the virtual router.
required: True
route_table:
description:
- Summarize route.
choices:
- unicast
- multicast
- both
default: 'unicast'
set_as_path_limit:
description:
- Add the AS_PATHLIMIT path attribute.
type: int
set_community:
description:
- Add the COMMUNITY path attribute.
type: list
set_extended_community:
description:
- Add the EXTENDED COMMUNITY path attribute.
type: list
set_local_preference:
description:
- Add the LOCAL_PREF path attribute.
type: int
set_med:
description:
- Add the MULTI_EXIT_DISC path attribute.
type: int
set_origin:
description:
- New route origin.
choices:
- igp
- egp
- incomplete
default: 'incomplete'
vr_name:
description:
- Name of the virtual router; it must already exist.
- See M(panos_virtual_router)
default: 'default'
'''
EXAMPLES = '''
- name: BGP use Redistribution Policy 1
panos_bgp_redistribute:
provider: '{{ provider }}'
name: '10.2.3.0/24'
enable: true
commit: true
address_family_identifier: ipv4
set_origin: incomplete
vr_name: default
'''
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.panos.panos import get_connection
try:
from pandevice.errors import PanDeviceError
from pandevice.network import VirtualRouter
from pandevice.network import Bgp
from pandevice.network import BgpRedistributionRule
except ImportError:
pass
def setup_args():
return dict(
commit=dict(
type='bool', default=True,
help='Commit configuration if changed'),
vr_name=dict(
default='default',
help='Name of the virtual router; it must already exist; see panos_virtual_router'),
name=dict(
type='str', required=True,
help='An IPv4 subnet or a defined Redistribution Profile in the virtual router'),
enable=dict(
default=True, type='bool',
help='Enable rule'),
address_family_identifier=dict(
type='str', default='ipv4', choices=['ipv4', 'ipv6'],
help='Address Family Identifier'),
route_table=dict(
type='str', default='unicast', choices=['unicast', 'multicast', 'both'],
help='Summarize route'),
set_origin=dict(
type='str', default='incomplete', choices=['igp', 'egp', 'incomplete'],
help='New route origin'),
set_med=dict(
type='int',
help='Add the MULTI_EXIT_DISC path attribute'),
set_local_preference=dict(
type='int',
help='Add the LOCAL_PREF path attribute'),
set_as_path_limit=dict(
type='int',
help='Add the AS_PATHLIMIT path attribute'),
set_community=dict(
type='list',
help='Add the COMMUNITY path attribute'),
set_extended_community=dict(
type='list',
help='Add the EXTENDED COMMUNITY path attribute'),
metric=dict(
type='int',
help='Metric value'),
)
def main():
helper = get_connection(
template=True,
template_stack=True,
with_state=True,
with_classic_provider_spec=True,
argument_spec=setup_args(),
)
module = AnsibleModule(
argument_spec=helper.argument_spec,
supports_check_mode=True,
required_one_of=helper.required_one_of,
)
parent = helper.get_pandevice_parent(module)
vr = VirtualRouter(module.params['vr_name'])
parent.add(vr)
try:
vr.refresh()
except PanDeviceError as e:
module.fail_json(msg='Failed refresh: {0}'.format(e))
bgp = vr.find('', Bgp)
if bgp is None:
module.fail_json(msg='BGP is not configured for "{0}"'.format(vr.name))
spec = {
'name': module.params['name'],
'enable': module.params['enable'],
'address_family_identifier': module.params['address_family_identifier'],
'route_table': module.params['route_table'],
'set_origin': module.params['set_origin'],
'set_med': module.params['set_med'],
'set_local_preference': module.params['set_local_preference'],
'set_as_path_limit': module.params['set_as_path_limit'],
'set_community': module.params['set_community'],
'set_extended_community': module.params['set_extended_community'],
'metric': module.params['metric'],
}
listing = bgp.findall(BgpRedistributionRule)
obj = BgpRedistributionRule(**spec)
bgp.add(obj)
changed = helper.apply_state(obj, listing, module)
if changed and module.params['commit']:
helper.commit(module)
module.exit_json(changed=changed, msg='done')
if __name__ == '__main__':
main()
| 29.584362
| 96
| 0.621783
|
c3d7052b1861c2454a434f77adce55e0266b2cd6
| 1,964
|
py
|
Python
|
app/modules/core/request.py
|
JohJohan/silverback
|
e27bc5d238d2b34955a470a8e8327ae44022b78b
|
[
"Apache-2.0"
] | null | null | null |
app/modules/core/request.py
|
JohJohan/silverback
|
e27bc5d238d2b34955a470a8e8327ae44022b78b
|
[
"Apache-2.0"
] | null | null | null |
app/modules/core/request.py
|
JohJohan/silverback
|
e27bc5d238d2b34955a470a8e8327ae44022b78b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Silverbackhq
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Third Party Library
from django.utils.translation import gettext as _
# Local Library
from app.modules.util.helpers import Helpers
class Request():
def __init__(self, request=None):
self.__request = request
self.__helpers = Helpers()
self.__logger = self.__helpers.get_logger(__name__)
def set_request(self, request):
self.__request = request
def get_request_data(self, method, predicted):
request_data = {}
log_data = {}
correlation_id = self.__request.META["X-Correlation-ID"] if "X-Correlation-ID" in self.__request.META else ""
data_bag = self.__request.POST if method.lower() == "post" else self.__request.GET
for key, default in predicted.items():
if "password" in key:
log_data[key] = "<hidden>" if key in data_bag else default
elif "token" in key:
log_data[key] = "<hidden>" if key in data_bag else default
else:
log_data[key] = data_bag[key] if key in data_bag else default
request_data[key] = data_bag[key] if key in data_bag else default
self.__logger.debug(_("App Incoming Request: %(data)s {'correlationId':'%(correlationId)s'}") % {
"data": self.__helpers.json_dumps(log_data),
"correlationId": correlation_id
})
return request_data
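# Hypothetical usage sketch (the field names are illustrative only):
#   data = Request(request).get_request_data("post", {"email": "", "password": ""})
# returns the submitted values, while the debug log replaces the password value
# with "<hidden>".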
| 37.056604
| 117
| 0.67057
|
0c6490b499ba4c61305cbdfed871471f52659b84
| 9,684
|
py
|
Python
|
cogs/tarot.py
|
andresgsepulveda/Sovereign-of-Chaos
|
9f362e3eeb31925712f65f94b9db9a4740689dba
|
[
"MIT"
] | 1
|
2021-08-16T19:10:14.000Z
|
2021-08-16T19:10:14.000Z
|
cogs/tarot.py
|
andresgsepulveda/Sovereign-of-Chaos
|
9f362e3eeb31925712f65f94b9db9a4740689dba
|
[
"MIT"
] | 28
|
2021-08-16T15:50:10.000Z
|
2021-12-24T22:58:10.000Z
|
cogs/tarot.py
|
andresgsepulveda/Sovereign-of-Chaos
|
9f362e3eeb31925712f65f94b9db9a4740689dba
|
[
"MIT"
] | null | null | null |
##############################################
# Package Imports
##############################################
import asyncio
import discord
import json
import pytz
import random
from datetime import datetime, timedelta
from discord import Embed, Guild, User
from discord.ext import commands, tasks
from discord.ext.commands import Bot, Context
from typing import Iterable
import database
from log import ConsoleLog
##############################################
# Package Imports
##############################################
MODULE = "TAROT"
CARD_FILE_PATH = "data/card_data.json"
CARD_IMAGE_LINK = "https://i.ibb.co/FxCgHwK/cards-fortune-future-moon-star-tarot-tarot-card-1-512.webp"
READ_TAG = 'r'
GET_EVENTS_SCRIPT = """
SELECT server_id, user_id, event_time
FROM events
WHERE category = "tarot";
"""
##############################################
# Tarot Cog
##############################################
class Tarot( commands.Cog, name = "Tarot" ):
def __init__( self, bot: Bot ):
self.bot = bot
self.logging = ConsoleLog()
self.db = database.DB()
self.loadCardData()
# self.loadTimezones()
# self.loadTimezoneEmbed()
@tasks.loop( hours = 24 )
async def dailyTarotReading( self, tarot_event: Iterable[tuple] ) -> None:
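# tarot_event is a (server_id, user_id, event_time) row from the events table (see GET_EVENTS_SCRIPT).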
guildID = tarot_event[0]
guild = await self.bot.fetch_guild(guildID)
userID = tarot_event[1]
user = await guild.fetch_member(userID)
eventTime = tarot_event[2]
await self.beforeDailyTarotReading( guild, user, eventTime )
await user.send(f"How have you been {user}? I've got your daily tarot reading ready to go! Take a look below:")
question = ""
numCards = 3
deck = self.card_list.copy()
random.shuffle( deck )
cards = self.drawCardsFromList( deck, numCards )
embed = self.createCardsEmbed( user, cards, question )
await user.send( embed = embed )
return
async def beforeDailyTarotReading( self, guild: Guild, user: User, event_time: str ) -> None:
# event_time is stored as "HH:MM"; parse both digits of the hour and of the minute
hour = int(event_time[0:2])
minute = int(event_time[3:5])
now = datetime.now()
future = datetime(now.year, now.month, now.day, hour, minute)
if now.hour >= hour and now.minute > minute:
future += timedelta(days=1)
delta = (future - now).seconds
self.logging.send( MODULE, f"Scheduled daily tarot reading for '{user}' at '{hour}:{minute}' MT." )
await asyncio.sleep(delta)
return
##############################################
# Tarot Cog Events
##############################################
@commands.Cog.listener()
async def on_ready( self ) -> None:
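# Re-schedule every stored daily tarot reading subscription when the bot comes online.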
self.db.start()
self.db.executeScript(GET_EVENTS_SCRIPT)
results = self.db.cursor.fetchall()
for tarotEvent in results:
self.dailyTarotReading.start(tarotEvent)
self.db.stop()
return
##############################################
##############################################
# Tarot Cog Commands
##############################################
@commands.group( name = "tarot" )
async def tarot( self, ctx: Context ) -> None:
if ctx.invoked_subcommand is None:
await ctx.send("ERROR: Tarot command(s) improperly invoked. Please see '!help' for a list of commands and usage examples.")
return
@tarot.command( name = "draw" )
async def drawCards( self, ctx: Context, *args ) -> None:
"""
Allows the user to draw a number of cards from a tarot deck. The user can draw anywhere from 1 to 25 cards at a time.
"""
await self.bot.wait_until_ready()
# ERROR CASE: If not enough arguments
if len(args) <= 0:
await ctx.send("ERROR: Not enough arguments to execute command. Please try using the command like so: `!tarot draw <number>`")
return
# ERROR CASE: If arguments don't make sense
try:
numCards = int(args[0])
if numCards < 1 or numCards > 25:
await ctx.send("ERROR: Number of cards given is out of bounds. Please try with a number between 1 and 25.")
return
except Exception as e:
exception = f"{type(e).__name__}: {e}"
await ctx.send("ERROR: Not valid input. Consider using the command like this: `!tarot draw <number>`. Keep in mind the module **can only support up to 25 cards** drawn at a time.")
self.logging.send( MODULE, f"ERROR: Can't parse command input for 'draw': {exception}")
return
await ctx.send("What is the question you would like to ask to the cards?")
msg = await self.bot.wait_for("message")
question = msg.content
# Shuffle the cards
await ctx.send( "Shuffling my deck...")
deck = self.card_list.copy()
random.shuffle(deck)
# Roll for each card indicated
await ctx.send( f"Drawing `{numCards}` card(s)...")
cards = self.drawCardsFromList( deck, numCards )
user = ctx.message.author
# Create embed
embed = self.createCardsEmbed( user, cards, question )
# Send embed to chat
await ctx.send( embed = embed )
return
@tarot.command( name = "subscribe" , aliases = ["sub"])
async def subscribeForDailyTarot( self, ctx: Context ):
await self.bot.wait_until_ready()
# ERROR CASE: if they already have a event scheduled
# Continue conversation in DM's
# Ask for Timezone
# Get timezone
# Ask for Time
# Convert time from given timezone to MT
# Update DB with information
# Confirm it has been scheduled at a given time!
return
@tarot.command( name = "unsubscribe", aliases = ["unsub"])
async def unsubscribeFromDailyTarot( self, ctx: Context ):
await self.bot.wait_until_ready()
# ERROR CASE: if they don't have an event scheduled
# Confirm they would like to unsubscribe
# If yes, then remove them from the DB
# Confirm they have been removed from the events DB
return
##############################################
# Tarot Cog Support Functions
##############################################
# Synchronous Support Functions
def createCardsEmbed( self, user: User, cardLst: Iterable[list], question: str ) -> Embed:
"""
Support function for '!tarot draw'. Creates the card embed and returns it for sending.
"""
# Get user from context
# Create embed
embed = discord.Embed(
title = f"Tarot Reading",
description = f"**Question:** {question}",
color = discord.Color.purple(),
timestamp = datetime.now() )
embed.set_author(
name = str(user),
icon_url = str(user.avatar_url)
)
embed.set_thumbnail( url = CARD_IMAGE_LINK )
# Add fields iteratively
count = 1
for card in cardLst:
cardName = card["name"]
cardStr = f"{count}. {cardName}"
reversed = random.randint( 0, 1 )
if reversed == 0:
meaning_key = "meaning_rev"
cardStr += " (R) [NO]"
else:
meaning_key = "meaning_up"
cardStr += " [YES]"
cardDesc = card[meaning_key]
embed.add_field(
name = cardStr,
inline = False,
value = cardDesc
)
count += 1
return embed
def drawCardsFromList( self , deck: Iterable[list], numCards: int ) -> Iterable[list]:
"""
Support function for '!tarot draw'. Pulls a specified number of items from the given 'deck'.
"""
cards = []
iterations = 0
# For the number of cards needed (or while we're there)
# While loop used to not worry about length of deck
while iterations < numCards:
# If we only need one card, top deck
if numCards == 1:
roll = 0
else:
# Roll for whatever card in the deck
roll = random.randint( 0, len(deck) - 1)
# Remove card from deck
card = deck.pop( roll )
# Add card to drawn list
cards.append( card )
iterations += 1
# Return cards back to the user
return cards
def loadCardData( self ) -> None:
"""
Support function for Tarot Cog. Loads the card list into memory on bot startup.
"""
self.logging.send( MODULE, "> Loading card list...")
# Open card list file and set card_list var to json result
try:
with open( CARD_FILE_PATH, READ_TAG ) as read_file:
self.card_list = json.load(read_file)
self.card_list = self.card_list["cards"]
self.logging.send( MODULE, "> Card list loaded successfully!")
# Print out what went wrong on startup
except Exception as e:
exception = f"{type(e).__name__}: {e}"
self.logging.send( MODULE,
f"> ERROR: Failed to load card lists. \n{exception}" )
self.logging.send( MODULE,
"WARNING: Tarot module may not work as intended. See error output for more details." )
return
def loadTimezones( self ) -> None:
for tz in pytz.common_timezones:
print(tz)
return
def loadTimezoneEmbed( self ) -> None:
return
# End of Tarot Cog
##############################################
# Setup Function for Bot
##############################################
def setup( bot: Bot ) -> None:
logging = ConsoleLog()
logging.send( MODULE, f"Attempting load of '{MODULE}' extension...")
bot.add_cog( Tarot( bot ) )
| 28.994012
| 186
| 0.576828
|
5a78d3002eb03be5be5f4235741d99fd3f68cc89
| 607
|
py
|
Python
|
spr_1dzial/sygnal.py
|
virgohi21/school-3
|
147c01f29bbc62942491e2a7886a4d5d8fb848a3
|
[
"BSD-2-Clause"
] | null | null | null |
spr_1dzial/sygnal.py
|
virgohi21/school-3
|
147c01f29bbc62942491e2a7886a4d5d8fb848a3
|
[
"BSD-2-Clause"
] | null | null | null |
spr_1dzial/sygnal.py
|
virgohi21/school-3
|
147c01f29bbc62942491e2a7886a4d5d8fb848a3
|
[
"BSD-2-Clause"
] | null | null | null |
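# Reads 1000 lines from sygnaly.txt, keeps every 40th line and prints its 10th character,
# then finds the longest of the lines read and appends it to odp.txt.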
tablica = []
wszystkie_linijki = []
with open('sygnaly.txt', 'r') as f:
for x in range(1000):
slowo = f.readline()
if (x+1)%40 == 0:
tablica.append(slowo)
wszystkie_linijki.append(slowo)
for x in range(len(tablica)):
slowo = tablica[x]
print(slowo[9],end='')
print()
najwieksza_liczba=0
for x in range(len(wszystkie_linijki)):
a = wszystkie_linijki[x]
if len(a) > najwieksza_liczba:
najdluzsza_linijka=x
najwieksza_liczba = len(a)
with open('odp.txt', 'a') as f:
f.write(f'{wszystkie_linijki[najdluzsza_linijka]}')
| 20.931034
| 55
| 0.61285
|
bedb54db2d8a6bdb4bbc2bad3998cbe9da87fbd8
| 2,366
|
py
|
Python
|
pyleecan/GUI/Dialog/DMachineSetup/SPreview/WMachineTable/WMachineTable.py
|
carbon-drive/pyleecan
|
e89d4fe97f23f6182c19127d2c6a2133614e169d
|
[
"Apache-2.0"
] | 1
|
2021-07-08T01:27:24.000Z
|
2021-07-08T01:27:24.000Z
|
pyleecan/GUI/Dialog/DMachineSetup/SPreview/WMachineTable/WMachineTable.py
|
ecs-kev/pyleecan
|
1faedde4b24acc6361fa1fdd4e980eaec4ca3a62
|
[
"Apache-2.0"
] | null | null | null |
pyleecan/GUI/Dialog/DMachineSetup/SPreview/WMachineTable/WMachineTable.py
|
ecs-kev/pyleecan
|
1faedde4b24acc6361fa1fdd4e980eaec4ca3a62
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from PySide2.QtWidgets import QWidget, QTableWidgetItem
from ......GUI.Dialog.DMachineSetup.SPreview.WMachineTable.Ui_WMachineTable import (
Ui_WMachineTable,
)
import matplotlib.pyplot as plt
from ......Functions.Plot.set_plot_gui_icon import set_plot_gui_icon
class WMachineTable(Ui_WMachineTable, QWidget):
"""Table to display the main paramaters of the machine"""
def __init__(self, parent=None):
"""Initialize the GUI
Parameters
----------
self : WMachineTable
A WMachineTable widget
"""
# Build the interface according to the .ui file
QWidget.__init__(self, parent)
self.setupUi(self)
self.machine = None
# Connect the widget
self.b_mmf.clicked.connect(self.plot_mmf)
self.b_plot_machine.clicked.connect(self.plot_machine)
def update_tab(self, machine):
"""Update the table to match the machine
Parameters
----------
self : WMachineTable
A WMachineTable object
machine : Machine
machine to display in the table
"""
self.machine = machine
desc_dict = self.machine.comp_desc_dict()
self.tab_param.clear()
# Set header
self.tab_param.setColumnCount(2)
item = QTableWidgetItem("Name")
self.tab_param.setHorizontalHeaderItem(0, item)
item = QTableWidgetItem("Value")
self.tab_param.setHorizontalHeaderItem(1, item)
# Set content
for ii, desc in enumerate(desc_dict):
if desc["value"] is not None:
self.tab_param.insertRow(ii)
self.tab_param.setItem(ii, 0, QTableWidgetItem(desc["verbose"]))
if desc["type"] is float:
txt = format(desc["value"], ".4g")
else:
txt = str(desc["value"])
if desc["unit"] not in ["", None]:
txt += " " + desc["unit"]
self.tab_param.setItem(ii, 1, QTableWidgetItem(txt))
def plot_mmf(self):
"""Plot the unit mmf of the stator"""
if self.machine is not None:
self.machine.stator.plot_mmf_unit(is_show_fig=True)
set_plot_gui_icon()
def plot_machine(self):
"""Plot the machine"""
if self.machine is not None:
self.machine.plot()
set_plot_gui_icon()
| 31.131579
| 84
| 0.590025
|
f83cb3b6979ab3e034f71fdb7143f7decdcb56d7
| 8,002
|
py
|
Python
|
plugins/modules/oci_key_management_generated_key.py
|
hanielburton/oci-ansible-collection
|
dfdffde637f746d346ba35569be8c3a3407022f2
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_key_management_generated_key.py
|
hanielburton/oci-ansible-collection
|
dfdffde637f746d346ba35569be8c3a3407022f2
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_key_management_generated_key.py
|
hanielburton/oci-ansible-collection
|
dfdffde637f746d346ba35569be8c3a3407022f2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2017, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_key_management_generated_key
short_description: Manage a GeneratedKey resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create a GeneratedKey resource in Oracle Cloud Infrastructure
- For I(state=present), generates a key that you can use to encrypt or decrypt data.
version_added: "2.9"
author: Oracle (@oracle)
options:
associated_data:
description:
- Information that can be used to provide an encryption context for the encrypted data.
The length of the string representation of the associated data must be fewer than 4096
characters.
type: dict
include_plaintext_key:
description:
- If true, the generated key is also returned unencrypted.
type: bool
required: true
key_id:
description:
- The OCID of the master encryption key to encrypt the generated data encryption key with.
type: str
required: true
key_shape:
description:
- ""
type: dict
required: true
suboptions:
algorithm:
description:
- The algorithm used by a key's key versions to encrypt or decrypt.
type: str
choices:
- "AES"
- "RSA"
required: true
length:
description:
- The length of the key, expressed as an integer. Values of 16, 24, or 32 are supported.
type: int
required: true
logging_context:
description:
- Information that provides context for audit logging. You can provide this additional
data by formatting it as key-value pairs to include in audit logs when audit logging is enabled.
type: dict
service_endpoint:
description:
- The endpoint of the service to call using this client. For example 'https://kms.{region}.{secondLevelDomain}'.
type: str
required: true
state:
description:
- The state of the GeneratedKey.
- Use I(state=present) to create a GeneratedKey.
type: str
required: false
default: 'present'
choices: ["present"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource ]
"""
EXAMPLES = """
- name: Create generated_key
oci_key_management_generated_key:
include_plaintext_key: true
key_id: ocid1.key.oc1.iad.exampledaaeug.examplestkvmbjdnbickxcvbotxd5q23tteidhj4q2c6qfauxm32i577yu5a
key_shape:
algorithm: AES
length: 16
service_endpoint: "https://xxx.kms.{region}.oraclecloud.com"
"""
RETURN = """
generated_key:
description:
- Details of the GeneratedKey resource acted upon by the current operation
returned: on success
type: complex
contains:
ciphertext:
description:
- The encrypted data encryption key generated from a master encryption key.
returned: on success
type: string
sample: ciphertext_example
plaintext:
description:
- "The plaintext data encryption key, a base64-encoded sequence of random bytes, which is
included if the L(GenerateDataEncryptionKey,https://docs.cloud.oracle.com/api/#/en/key/latest/GeneratedKey/GenerateDataEncryptionKey)
request includes the `includePlaintextKey` parameter and sets its value to \\"true\\"."
returned: on success
type: string
sample: plaintext_example
plaintext_checksum:
description:
- "The checksum of the plaintext data encryption key, which is included if the
L(GenerateDataEncryptionKey,https://docs.cloud.oracle.com/api/#/en/key/latest/GeneratedKey/GenerateDataEncryptionKey)
request includes the `includePlaintextKey` parameter and sets its value to \\"true\\"."
returned: on success
type: string
sample: plaintext_checksum_example
sample: {
"ciphertext": "ciphertext_example",
"plaintext": "plaintext_example",
"plaintext_checksum": "plaintext_checksum_example"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.key_management import KmsCryptoClient
from oci.key_management.models import GenerateKeyDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class GeneratedKeyHelperGen(OCIResourceHelperBase):
"""Supported operations: create"""
def get_module_resource_id(self):
return None
# There is no idempotency for this module (no get or list ops)
def get_matching_resource(self):
return None
def get_create_model_class(self):
return GenerateKeyDetails
def create_resource(self):
create_details = self.get_create_model()
return oci_wait_utils.call_and_wait(
call_fn=self.client.generate_data_encryption_key,
call_fn_args=(),
call_fn_kwargs=dict(generate_key_details=create_details,),
waiter_type=oci_wait_utils.NONE_WAITER_KEY,
operation=oci_common_utils.CREATE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_wait_for_states_for_operation(
oci_common_utils.CREATE_OPERATION_KEY,
),
)
GeneratedKeyHelperCustom = get_custom_class("GeneratedKeyHelperCustom")
class ResourceHelper(GeneratedKeyHelperCustom, GeneratedKeyHelperGen):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec(
supports_create=True, supports_wait=False
)
module_args.update(
dict(
associated_data=dict(type="dict"),
include_plaintext_key=dict(type="bool", required=True),
key_id=dict(type="str", required=True),
key_shape=dict(
type="dict",
required=True,
options=dict(
algorithm=dict(type="str", required=True, choices=["AES", "RSA"]),
length=dict(type="int", required=True),
),
),
logging_context=dict(type="dict"),
service_endpoint=dict(type="str", required=True),
state=dict(type="str", default="present", choices=["present"]),
)
)
module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_helper = ResourceHelper(
module=module,
resource_type="generated_key",
service_client_class=KmsCryptoClient,
namespace="key_management",
)
result = dict(changed=False)
if resource_helper.is_create():
result = resource_helper.create()
module.exit_json(**result)
if __name__ == "__main__":
main()
| 34.196581
| 151
| 0.650212
|
55cbd0a0e7ff4bc65b2d001ceb1471cdc337f438
| 59
|
py
|
Python
|
padqc/tools/__init__.py
|
qis-unipr/padqc
|
94599db20711dc755b53425951fa3cb15b749f64
|
[
"Apache-2.0"
] | null | null | null |
padqc/tools/__init__.py
|
qis-unipr/padqc
|
94599db20711dc755b53425951fa3cb15b749f64
|
[
"Apache-2.0"
] | null | null | null |
padqc/tools/__init__.py
|
qis-unipr/padqc
|
94599db20711dc755b53425951fa3cb15b749f64
|
[
"Apache-2.0"
] | 1
|
2021-02-18T22:11:18.000Z
|
2021-02-18T22:11:18.000Z
|
from .visualize import circuit_drawer
from .depth import *
| 19.666667
| 37
| 0.813559
|
ff1c299241b7e4be1dd6cbb8849f4cf1f981c8e0
| 11,708
|
py
|
Python
|
src/osuapi.py
|
kisunpoke/stk8-bot
|
2032b2058e59c4b0c3e42e5947622dd114d95cec
|
[
"Apache-2.0"
] | null | null | null |
src/osuapi.py
|
kisunpoke/stk8-bot
|
2032b2058e59c4b0c3e42e5947622dd114d95cec
|
[
"Apache-2.0"
] | null | null | null |
src/osuapi.py
|
kisunpoke/stk8-bot
|
2032b2058e59c4b0c3e42e5947622dd114d95cec
|
[
"Apache-2.0"
] | null | null | null |
"""Async functions for interacting with the osu! api and prepping its data.
This uses the v1 endpoint because it is more stable. Use v2 when available or
necessary.
"""
import aiohttp
from enum import IntFlag
import os
import math
import db_manip
import db_get
api_key = os.getenv("osu_key")
class Mods(IntFlag):
"""Enum of the osu! mods exposed by the API.
Underscores are used for n-key mods because of Python identifier naming restrictions."""
NM = 0,
NF = 1,
EZ = 2,
TD = 4,
HD = 8,
HR = 16,
SD = 32,
DT = 64,
RX = 128,
HT = 256,
NC = 512,
FL = 1024,
AT = 2048,
SO = 4096,
AP = 8192,
PF = 16384,
_4K = 32768,
_5K = 65536,
_6K = 131072,
_7K = 262144,
_8K = 524288,
FI = 1048576,
RD = 2097152,
CN = 4194304,
TP = 8388608,
_9K = 16777216,
CO = 33554432,
_1K = 67108864,
_3K = 134217728,
_2K = 268435456,
V2 = 536870912,
MR = 1073741824
def to_list(self):
"""Returns a list of strings represented by this enumeration."""
mod_list = str(self).split("|")
mod_list[0] = mod_list[0].split("Mods.")[1]
return mod_list
async def get_player_data(username):
"Return full JSON response from the osu! API with the given username."
#inherently works with either ID or username, ID preferred
async with aiohttp.ClientSession() as session:
player_request = await session.get(f'https://osu.ppy.sh/api/get_user?k={api_key}&u={username}')
player_data = await player_request.json()
#print(player_data)
return player_data[0]
async def get_map_data(diff_id):
async with aiohttp.ClientSession() as session:
map_request = await session.get(f'https://osu.ppy.sh/api/get_beatmaps?k={api_key}&b={diff_id}')
map_data = await map_request.json()
'''
thumbnail_url = f'https://b.ppy.sh/thumb/{map_data[0]["beatmapset_id"]}l.jpg'
map_name = f'{map_data[0]["artist"]} - {map_data[0]["title"]}'
data = {
'map_url': map_url,
'thumbnail_url': thumbnail_url,
'map_name': map_name
}
return data
'''
return map_data[0]
async def get_match_data(match_id):
async with aiohttp.ClientSession() as session:
match_request = await session.get(f'https://osu.ppy.sh/api/get_match?k={api_key}&mp={match_id}')
match_data = await match_request.json()
return match_data
async def process_match_data(match_id, map, *, data=None, player_ids={}, ignore_threshold=1000, ignore_player_ids=[]):
#no head-to-head functionality yet
"""Returns a dict of match data tailored for stat calculation.
`data` is expected to be the data of a `get_match_data()` call, and is used in lieu of calling
the osu! API - helpful if successive calls of this function for the same match occur.
Otherwise, `match_id` is used to get match data, then the nth `map` (zero-indexed) is
obtained and processed. If available, `player_ids` should be provided, a dict of `player_ids`
(str) to [`player_names` (str), `team_name` (str)].
Map indexes are redirected like other paginated functions; indexes less than 0 become 0, and
indexes greater than the max index become the max index.
- `ignore_player_list` will ignore specific player ids from calculation.
- `ignore_threshold` will ignore scores below a specific value. 1000 by default.
This function aims to expose useful data not normally available from the get_match
endpoint of the API.
Returns the following dict:
```
{
"match_name": str,
"match_id": str,
"match_url": f'https://osu.ppy.sh/community/matches/{match_id}',
"diff_id": str,
"diff_url": f'https://osu.ppy.sh/b/{diff_id}',
"map_thumbnail": f'https://b.ppy.sh/thumb/{diff_id}l.jpg',
"map_name": f'{artist} - {title}',
"winner": str, #(1 or 2)
"score_difference": int,
"team_1_score": int,
"team_2_score": int,
"team_1_score_avg": float,
"team_2_score_avg": float,
"individual_scores": [
{
"user_id": str,
"user_name": str,
"score": int,
"combo": int,
"accuracy": float,
"mod_val": int,
"mods": [str, str, ...],
"pass": str, #"0" or "1", where "0" is fail
"hits": {
"300_count": int,
"100_count": int,
"50_count": int,
"miss_count": int
},
"team_contrib": float,
"team": str #1 or 2,
"team_name": str #equivalent to the _id of a Team document
}, ...
]
"start_time": str,
"scoring_type": str,
"team_type": str,
"play_mode": str,
"player_ids": {str: str, ...} #key is player id as str, value is actual username as str
}
```
"""
match_data = data
if not match_data:
match_data = await get_match_data(match_id)
max_index = len(match_data["games"])-1
if map < 0:
map = 0
if map > max_index:
map = max_index
game_data = match_data["games"][int(map)]
#stop execution here if no scores are available, but there was a game for some reason
if not game_data['scores']:
return None
map_data = await get_map_data(game_data["beatmap_id"])
#now we'll start number crunching and stuff
#if head-to-head or tag co-op is selected
if game_data['team_type'] in ('0', '1'):
#currently unsupported!
return None
#if a team mode is selected
if game_data['team_type'] in ('2', '3'):
#determine who belongs in what team as well as the team scores
#as of now this is only used to get the number of players on a team, since we use
#a conditional to add teams to the correct field anyways
team_1_players = []
team_2_players = []
team_1_score = 0
team_2_score = 0
for player_score in game_data['scores']:
#ignore if below minimum score threshold or in ignore list
if int(player_score["score"]) < ignore_threshold or player_score["user_id"] in ignore_player_ids:
continue
if player_score["team"] == "1":
team_1_players.append(player_score["user_id"])
team_1_score += int(player_score["score"])
if player_score["team"] == "2":
team_2_players.append(player_score["user_id"])
team_2_score += int(player_score["score"])
#who won
if team_1_score != team_2_score:
winner = "Blue" if team_1_score > team_2_score else "Red"
else:
winner = "Tie"
#score diff
score_diff = abs(team_1_score-team_2_score)
#generate the data for individual player scores for this map
individual_scores = []
for player_score in game_data["scores"]:
#ignore if below minimum score threshold or in ignore list
if int(player_score["score"]) < ignore_threshold or player_score["user_id"] in ignore_player_ids:
continue
count_300 = int(player_score["count300"])
count_100 = int(player_score["count100"])
count_50 = int(player_score["count50"])
count_miss = int(player_score["countmiss"])
acc_count = count_300 + count_100 + count_50 + count_miss
acc_value = (count_300+(count_100/3)+(count_50/6))/acc_count
score = int(player_score["score"])
contrib = score/team_1_score if player_score["team"] == "1" else score/team_2_score
#if we don't currently know what the name of a certain player id is, look it up against the mongodb and osuapi, in that order
#might fail if the player is restricted, not sure on that
try:
player_name = player_ids[player_score["user_id"]][0]
team_name = player_ids[player_score["user_id"]][1]
except:
#print(f"Hit MongoDB for player ID {player_score['user_id']}")
player_document = await db_get.get_player_document(player_score['user_id'])
if player_document == None:
#this means that we don't have this player saved for some reason
#so we'll go the alternative route, getting the username manually
#this'll probably happen if somebody tries to get a non-tournament mp
print(f"MongoDB lookup for {player_score['user_id']} failed, resorting to osu! api")
player_data = await get_player_data(player_score["user_id"])
player_name = player_data["username"]
team_name = ""
else:
player_name = player_document["user_name"]
team_name = player_document["team_name"]
#add to player_ids dict, which will help us build a cache over time for certain processes
player_ids[player_score["user_id"]] = [player_name, team_name]
individual_score = {
"user_id": player_score["user_id"],
"user_name": player_name,
"score": score,
"combo": int(player_score["maxcombo"]),
"accuracy": acc_value,
"mod_val": int(game_data["mods"]),
"mods": Mods(int(game_data["mods"])).to_list(), #global mods assumed
"pass": player_score["pass"],
"hits": {
"300_count": count_300,
"100_count": count_100,
"50_count": count_50,
"miss_count": count_miss
},
"team_contrib": contrib,
"team": player_score["team"],
"team_name": team_name
}
individual_scores.append(individual_score)
#import pprint
#pprint.pprint(match_data)
#pprint.pprint(game_data)
team_vs_final = {
"match_name": match_data["match"]["name"],
"match_id": match_id,
"match_url": f'https://osu.ppy.sh/community/matches/{match_id}',
"diff_id": game_data["beatmap_id"],
"diff_url": f'https://osu.ppy.sh/b/{game_data["beatmap_id"]}',
"map_thumbnail": f'https://b.ppy.sh/thumb/{map_data["beatmapset_id"]}l.jpg',
"map_name": f'{map_data["artist"]} - {map_data["title"]} [{map_data["version"]}]',
"winner": winner,
"score_difference": score_diff,
"team_1_score": team_1_score,
"team_2_score": team_2_score,
"team_1_score_avg": round(team_1_score/len(team_1_players),2) if len(team_1_players) != 0 else 0,
"team_2_score_avg": round(team_2_score/len(team_2_players),2) if len(team_2_players) != 0 else 0,
"individual_scores": individual_scores,
"start_time": game_data["start_time"],
"scoring_type": game_data["scoring_type"],
"team_type": game_data["team_type"],
"play_mode": game_data["play_mode"],
"player_ids": player_ids
}
return team_vs_final
| 40.794425
| 138
| 0.571917
|
00e05fbbced30da30da7c2a476e4e50fc557fc7c
| 12,148
|
py
|
Python
|
rasa/utils/plotting.py
|
Next-Trends/rasa
|
c06dc26b3a57dd1114b60aebcc9ccd3bbb8308d7
|
[
"Apache-2.0"
] | 3,603
|
2017-05-21T18:34:55.000Z
|
2019-04-16T11:58:09.000Z
|
rasa/utils/plotting.py
|
Next-Trends/rasa
|
c06dc26b3a57dd1114b60aebcc9ccd3bbb8308d7
|
[
"Apache-2.0"
] | 2,782
|
2017-05-21T20:36:15.000Z
|
2019-04-16T14:35:20.000Z
|
rasa/utils/plotting.py
|
Next-Trends/rasa
|
c06dc26b3a57dd1114b60aebcc9ccd3bbb8308d7
|
[
"Apache-2.0"
] | 1,337
|
2017-05-21T18:10:33.000Z
|
2019-04-16T09:14:42.000Z
|
import logging
import itertools
import os
from functools import wraps
import numpy as np
from typing import Any, Callable, List, Optional, Text, TypeVar, Union, Tuple
import matplotlib
from matplotlib.ticker import FormatStrFormatter
import rasa.shared.utils.io
from rasa.constants import RESULTS_FILE
logger = logging.getLogger(__name__)
def _fix_matplotlib_backend() -> None:
"""Tries to fix a broken matplotlib backend."""
try:
backend = matplotlib.get_backend()
except Exception: # skipcq:PYL-W0703
logger.error(
"Cannot retrieve Matplotlib backend, likely due to a compatibility "
"issue with system dependencies. Please refer to the documentation: "
"https://matplotlib.org/stable/tutorials/introductory/usage.html#backends"
)
raise
# At first, matplotlib will be initialized with default OS-specific
# available backend
if backend == "TkAgg":
try:
# on OSX sometimes the tkinter package is broken and can't be imported.
# we'll try to import it and if it fails we will use a different backend
import tkinter # noqa: 401
except (ImportError, ModuleNotFoundError):
logger.debug("Setting matplotlib backend to 'agg'")
matplotlib.use("agg")
# if no backend is set by default, we'll try to set it up manually
elif backend is None: # pragma: no cover
try:
# If the `tkinter` package is available, we can use the `TkAgg` backend
import tkinter # noqa: 401
logger.debug("Setting matplotlib backend to 'TkAgg'")
matplotlib.use("TkAgg")
except (ImportError, ModuleNotFoundError):
logger.debug("Setting matplotlib backend to 'agg'")
matplotlib.use("agg")
ReturnType = TypeVar("ReturnType")
FuncType = Callable[..., ReturnType]
_MATPLOTLIB_BACKEND_FIXED = False
def _needs_matplotlib_backend(func: FuncType) -> FuncType:
"""Decorator to fix matplotlib backend before calling a function."""
@wraps(func)
def inner(*args: Any, **kwargs: Any) -> ReturnType:
"""Replacement function that fixes matplotlib backend."""
global _MATPLOTLIB_BACKEND_FIXED
if not _MATPLOTLIB_BACKEND_FIXED:
_fix_matplotlib_backend()
_MATPLOTLIB_BACKEND_FIXED = True
return func(*args, **kwargs)
return inner
@_needs_matplotlib_backend
def plot_confusion_matrix(
confusion_matrix: np.ndarray,
classes: Union[np.ndarray, List[Text]],
normalize: bool = False,
title: Text = "Confusion matrix",
color_map: Any = None,
zmin: int = 1,
output_file: Optional[Text] = None,
) -> None:
"""
Print and plot the provided confusion matrix.
Normalization can be applied by setting `normalize=True`.
Args:
confusion_matrix: confusion matrix to plot
classes: class labels
normalize: If set to true, normalization will be applied.
title: title of the plot
color_map: color mapping
zmin: minimum value for the logarithmic color scale
output_file: output file to save plot to
"""
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
zmax = confusion_matrix.max() if len(confusion_matrix) > 0 else 1
plt.clf()
if not color_map:
color_map = plt.cm.Blues
plt.imshow(
confusion_matrix,
interpolation="nearest",
cmap=color_map,
aspect="auto",
norm=LogNorm(vmin=zmin, vmax=zmax),
)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=90)
plt.yticks(tick_marks, classes)
if normalize:
confusion_matrix = (
confusion_matrix.astype("float")
/ confusion_matrix.sum(axis=1)[:, np.newaxis]
)
logger.info(f"Normalized confusion matrix: \n{confusion_matrix}")
else:
logger.info(f"Confusion matrix, without normalization: \n{confusion_matrix}")
thresh = zmax / 2.0
for i, j in itertools.product(
range(confusion_matrix.shape[0]), range(confusion_matrix.shape[1])
):
plt.text(
j,
i,
confusion_matrix[i, j],
horizontalalignment="center",
color="white" if confusion_matrix[i, j] > thresh else "black",
)
plt.ylabel("True label")
plt.xlabel("Predicted label")
# save confusion matrix to file before showing it
if output_file:
fig = plt.gcf()
fig.set_size_inches(20, 20)
fig.savefig(output_file, bbox_inches="tight")
def _extract_paired_histogram_specification(
histogram_data: List[List[float]],
num_bins: int,
density: bool,
x_pad_fraction: float,
y_pad_fraction: float,
) -> Tuple[List[float], List[List[float]], List[float], Tuple[float, float]]:
"""Extracts all information from the data needed to plot a paired histogram.
Args:
histogram_data: Two data vectors
num_bins: Number of bins to be used for the histogram
density: If true, generate information for a probability density histogram
x_pad_fraction: Percentage of extra space in the horizontal direction
y_pad_fraction: Percentage of extra space in the vertical direction
Returns:
The bins, values, ranges of either x-axis, and the range of the y-axis
Raises:
ValueError: If histogram_data does not contain values.
"""
if not histogram_data or not np.concatenate(histogram_data).size:
rasa.shared.utils.io.raise_warning("No data to plot paired histogram.")
raise ValueError("No data to plot paired histogram.")
min_data_value = np.min(np.concatenate(histogram_data))
max_data_value = np.max(np.concatenate(histogram_data))
bin_width = (max_data_value - min_data_value) / num_bins
bins = [
min_data_value + i * bin_width
# `bins` describes the _boundaries_ of the bins, so we need
# 2 extra - one at the beginning and one at the end
for i in range(num_bins + 2)
]
histograms = [
# A list of counts - how often a value in `data` falls into a particular bin
np.histogram(data, bins=bins, density=density)[0]
for data in histogram_data
]
y_padding = 0.5 * bin_width + y_pad_fraction * bin_width
if density:
# Get the maximum count across both histograms, and scale it
# with `x_pad_fraction`
v = max([(1.0 + x_pad_fraction) * max(histogram) for histogram in histograms])
# When we plot the PDF, let both x-axes run to the same value
# so it's easier to compare visually
x_ranges = [v, v]
else:
# For the left and right histograms, get the largest counts and scale them
# by `x_pad_fraction` to get the maximum x-values displayed
x_ranges = [(1.0 + x_pad_fraction) * max(histogram) for histogram in histograms]
bin_of_first_non_zero_tally = min(
[(histogram != 0).argmax(axis=0) for histogram in histograms]
)
y_range = (
# Start plotting where the data starts (ignore empty bins at the low end)
bins[bin_of_first_non_zero_tally] - y_padding,
# The y_padding adds half a bin width, as we want the bars to be
# _centered_ on the bins. We take the next-to-last element of `bins`,
# because that is the beginning of the last bin.
bins[-2] + y_padding,
)
return bins, histograms, x_ranges, y_range
@_needs_matplotlib_backend
def plot_paired_histogram(
histogram_data: List[List[float]],
title: Text,
output_file: Optional[Text] = None,
num_bins: int = 25,
colors: Tuple[Text, Text] = ("#009292", "#920000"), # (dark cyan, dark red)
axes_label: Tuple[Text, Text] = ("Correct", "Wrong"),
frame_label: Tuple[Text, Text] = ("Number of Samples", "Confidence"),
density: bool = False,
x_pad_fraction: float = 0.05,
y_pad_fraction: float = 0.10,
) -> None:
"""Plots a side-by-side comparative histogram of the confidence distribution.
Args:
histogram_data: Two data vectors
title: Title to be displayed above the plot
output_file: File to save the plot to
num_bins: Number of bins to be used for the histogram
colors: Left and right bar colors as hex color strings
axes_label: Labels shown above the left and right histogram,
respectively
frame_label: Labels shown below and on the left of the
histogram, respectively
density: If true, generate a probability density histogram
x_pad_fraction: Percentage of extra space in the horizontal direction
y_pad_fraction: Percentage of extra space in the vertical direction
"""
if num_bins <= 2:
rasa.shared.utils.io.raise_warning(
f"Number {num_bins} of paired histogram bins must be at least 3."
)
return
try:
bins, tallies, x_ranges, y_range = _extract_paired_histogram_specification(
histogram_data,
num_bins,
density=density,
x_pad_fraction=x_pad_fraction,
y_pad_fraction=y_pad_fraction,
)
except (ValueError, TypeError) as e:
rasa.shared.utils.io.raise_warning(
f"Unable to plot paired histogram '{title}': {e}"
)
return
yticks = [float(f"{x:.2f}") for x in bins]
import matplotlib.pyplot as plt
plt.gcf().clear()
fig, axes = plt.subplots(ncols=2, sharey=True)
for side in range(2):
axes[side].barh(
bins[:-1],
tallies[side],
height=np.diff(bins),
align="center",
color=colors[side],
linewidth=1,
edgecolor="white",
)
axes[side].set(title=axes_label[side])
axes[side].set(yticks=yticks, xlim=(0, x_ranges[side]), ylim=y_range)
axes[0].yaxis.set_major_formatter(FormatStrFormatter("%.2f"))
axes[0].yaxis.set_minor_formatter(FormatStrFormatter("%.2f"))
axes[0].invert_xaxis()
axes[0].yaxis.tick_right()
# Add the title
fig.suptitle(title, fontsize="x-large", fontweight="bold")
# Add hidden plot to correctly add x and y labels (frame_label)
fig.add_subplot(111, frameon=False)
# Hide tick and tick label of the unused axis
plt.tick_params(labelcolor="none", top=False, bottom=False, left=False, right=False)
plt.xlabel(frame_label[0])
plt.ylabel(frame_label[1])
if output_file:
fig = plt.gcf()
fig.set_size_inches(10, 10)
fig.tight_layout(w_pad=0)
fig.savefig(output_file, bbox_inches="tight")
@_needs_matplotlib_backend
def plot_curve(
output_directory: Text,
number_of_examples: List[int],
x_label_text: Text,
y_label_text: Text,
graph_path: Text,
) -> None:
"""Plot the results from a model comparison.
Args:
output_directory: Output directory to save resulting plots to
number_of_examples: Number of examples per run
x_label_text: text for the x axis
y_label_text: text for the y axis
graph_path: output path of the plot
"""
import matplotlib.pyplot as plt
plt.gcf().clear()
ax = plt.gca()
# load results from file
data = rasa.shared.utils.io.read_json_file(
os.path.join(output_directory, RESULTS_FILE)
)
x = number_of_examples
# compute mean of all the runs for different configs
for label in data.keys():
if len(data[label]) == 0:
continue
mean = np.mean(data[label], axis=0)
std = np.std(data[label], axis=0)
ax.plot(x, mean, label=label, marker=".")
ax.fill_between(
x,
[m - s for m, s in zip(mean, std)],
[m + s for m, s in zip(mean, std)],
color="#6b2def",
alpha=0.2,
)
ax.legend(loc=4)
ax.set_xlabel(x_label_text)
ax.set_ylabel(y_label_text)
plt.savefig(graph_path, format="pdf")
logger.info(f"Comparison graph saved to '{graph_path}'.")
| 33.65097
| 88
| 0.645127
|
85f0b04e050370463db88e26b52d0d95d03acc22
| 4,232
|
py
|
Python
|
src/osd.py
|
vanzin/folderme
|
9a29eb0c221c005427bd0f8838d7e3941dbb84d7
|
[
"BSD-2-Clause"
] | null | null | null |
src/osd.py
|
vanzin/folderme
|
9a29eb0c221c005427bd0f8838d7e3941dbb84d7
|
[
"BSD-2-Clause"
] | null | null | null |
src/osd.py
|
vanzin/folderme
|
9a29eb0c221c005427bd0f8838d7e3941dbb84d7
|
[
"BSD-2-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-2-Clause
import app
import util
from PyQt5.QtCore import QPoint
from PyQt5.QtCore import QSize
from PyQt5.QtCore import QTimer
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QGuiApplication
from PyQt5.QtGui import QPixmap
def init():
Message.instance()
Track.instance()
def show_msg(msg):
Message.instance().show_osd(msg)
def show_track(track):
Track.instance().show_osd(track)
class BaseOSD:
_INSTANCE = None
_TIMER = None
_WINDOW = None
@classmethod
def instance(cls):
if not cls._INSTANCE:
cls._INSTANCE = cls()
return cls._INSTANCE
def __init__(self):
self.setFocusPolicy(Qt.NoFocus)
self.setWindowFlags(
Qt.FramelessWindowHint
| Qt.WindowStaysOnTopHint
| Qt.WindowDoesNotAcceptFocus
| Qt.Tool
)
self.setWindowOpacity(0.80)
def _pre_show(self):
if BaseOSD._TIMER:
BaseOSD._TIMER.stop()
BaseOSD._TIMER = None
if BaseOSD._WINDOW:
BaseOSD._WINDOW.close()
def _show_osd(self):
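# Position the OSD near the top-center of the primary screen (shrunk to half the screen width
# if it would not fit) and auto-close it after 2 seconds.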
psize = self.sizeHint()
sw = QGuiApplication.primaryScreen().availableSize().width()
w = psize.width()
if psize.width() > sw:
w = sw / 2
self.resize(QSize(w, psize.height()))
y = 64
x = sw // 2 - w // 2
self.move(QPoint(x, y))
show_timer = QTimer()
show_timer.setSingleShot(True)
show_timer.timeout.connect(self._close)
show_timer.start(2000)
BaseOSD._TIMER = show_timer
BaseOSD._WINDOW = self
QTimer.singleShot(0, self.show)
def _close(self):
BaseOSD._TIMER = None
BaseOSD._WINDOW = None
self.close()
class Message(util.compile_ui("osd_msg.ui"), BaseOSD):
def __init__(self):
super().__init__()
BaseOSD.__init__(self)
def show_osd(self, msg):
self._pre_show()
self.lMessage.setText(msg)
self._show_osd()
class Track(util.compile_ui("osd.ui"), BaseOSD):
def __init__(self):
super().__init__()
BaseOSD.__init__(self)
util.EventBus.add(self)
def track_playing(self, track):
self.show_osd(track)
def track_paused(self, track):
self.show_osd(track)
def track_stopped(self, track):
self.show_osd(track)
def track_changed(self, track):
if not app.get().playlist.is_playing():
self.show_osd(track)
def show_osd(self, track):
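# Fall back to the playlist's current track when none is given, then refresh the cover art, labels and play status.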
self._pre_show()
if not track:
track = app.get().playlist.current_track()
if track:
track = track.info
status = "Stopped"
if app.get().playlist.is_playing():
status = "Playing"
elif app.get().playlist.is_paused():
status = "Paused"
if not track:
pixmap = QPixmap()
pixmap.load(util.icon("blank.jpg"))
util.set_pixmap(self.cover, pixmap)
self.artist.setText("")
self.album.setText("")
self.track.setText("")
else:
cover = track.cover_art()
pixmap = QPixmap()
if cover:
pixmap.loadFromData(cover)
else:
pixmap.load(util.icon("blank.jpg"))
util.set_pixmap(self.cover, pixmap)
self.artist.setText(track.artist)
self.album.setText(track.album)
self.track.setText(track.title)
self.status.setText(status)
self._show_osd()
if __name__ == "__main__":
import collection
import sys
import time
class Args(object):
pass
args = Args()
args.no_save = True
args.no_lastfm = True
app.init(args)
init()
def msg_test():
show_msg("Hello this is a message!")
def track_test():
t = collection.Track()
t.artist = "Artist"
t.album = "Album"
t.title = "Title"
show_track(t)
QTimer.singleShot(1000, msg_test)
QTimer.singleShot(3000, track_test)
QTimer.singleShot(5000, lambda: app.get().exit())
sys.exit(app.get().exec_())
| 23.511111
| 68
| 0.579868
|
bc0c2f7bfc9110e58a115deb01ceb601c6cbf309
| 2,166
|
py
|
Python
|
example/app.py
|
Titotix/Flask-pyoidc
|
87b8059617304ec634d7b6d3f5f649c40d9cdb4c
|
[
"Apache-2.0"
] | 64
|
2017-01-31T09:08:15.000Z
|
2021-12-21T21:05:45.000Z
|
example/app.py
|
Titotix/Flask-pyoidc
|
87b8059617304ec634d7b6d3f5f649c40d9cdb4c
|
[
"Apache-2.0"
] | 99
|
2017-02-08T22:38:54.000Z
|
2022-03-31T22:03:27.000Z
|
example/app.py
|
Titotix/Flask-pyoidc
|
87b8059617304ec634d7b6d3f5f649c40d9cdb4c
|
[
"Apache-2.0"
] | 33
|
2017-02-09T18:19:51.000Z
|
2021-12-24T17:48:52.000Z
|
import datetime
import flask
import logging
from flask import Flask, jsonify
from flask_pyoidc import OIDCAuthentication
from flask_pyoidc.provider_configuration import ProviderConfiguration, ClientMetadata
from flask_pyoidc.user_session import UserSession
app = Flask(__name__)
# See http://flask.pocoo.org/docs/0.12/config/
app.config.update({'OIDC_REDIRECT_URI': 'http://localhost:5000/redirect_uri',
'SECRET_KEY': 'dev_key', # make sure to change this!!
'PERMANENT_SESSION_LIFETIME': datetime.timedelta(days=7).total_seconds(),
'DEBUG': True})
ISSUER1 = 'https://provider1.example.com'
CLIENT1 = 'client@provider1'
PROVIDER_NAME1 = 'provider1'
PROVIDER_CONFIG1 = ProviderConfiguration(issuer=ISSUER1,
client_metadata=ClientMetadata(CLIENT1, 'secret1'))
ISSUER2 = 'https://provider2.example.com'
CLIENT2 = 'client@provider2'
PROVIDER_NAME2 = 'provider2'
PROVIDER_CONFIG2 = ProviderConfiguration(issuer=ISSUER2,
client_metadata=ClientMetadata(CLIENT2, 'secret2'))
auth = OIDCAuthentication({PROVIDER_NAME1: PROVIDER_CONFIG1, PROVIDER_NAME2: PROVIDER_CONFIG2})
@app.route('/')
@auth.oidc_auth(PROVIDER_NAME1)
def login1():
user_session = UserSession(flask.session)
return jsonify(access_token=user_session.access_token,
id_token=user_session.id_token,
userinfo=user_session.userinfo)
@app.route('/login2')
@auth.oidc_auth(PROVIDER_NAME2)
def login2():
user_session = UserSession(flask.session)
return jsonify(access_token=user_session.access_token,
id_token=user_session.id_token,
userinfo=user_session.userinfo)
@app.route('/logout')
@auth.oidc_logout
def logout():
return "You've been successfully logged out!"
@auth.error_view
def error(error=None, error_description=None):
return jsonify({'error': error, 'message': error_description})
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
auth.init_app(app)
app.run()
| 34.380952
| 107
| 0.703601
|
6d810a04155c5e9edac46e0b34a275c77de56444
| 2,064
|
py
|
Python
|
src/beefore/local.py
|
jacebrowning/beefore
|
76a43cfa3fb83cfa3b3301277636f1beca35aee7
|
[
"BSD-3-Clause"
] | 24
|
2016-07-14T04:25:31.000Z
|
2019-02-12T19:37:01.000Z
|
src/beefore/local.py
|
jacebrowning/beefore
|
76a43cfa3fb83cfa3b3301277636f1beca35aee7
|
[
"BSD-3-Clause"
] | 17
|
2016-07-17T10:51:56.000Z
|
2019-02-02T20:20:43.000Z
|
src/beefore/local.py
|
jacebrowning/beefore
|
76a43cfa3fb83cfa3b3301277636f1beca35aee7
|
[
"BSD-3-Clause"
] | 19
|
2016-07-15T01:05:00.000Z
|
2018-06-03T17:35:40.000Z
|
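# Rebuilds a git-style unified diff (header lines plus hunks) from a GitPython Diff object,
# handling new, deleted and modified files.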
def unified_format(diff):
udiff = diff.diff.decode('utf-8').splitlines()
a_sha = diff.a_blob.hexsha[:7] if diff.a_blob else '0000000'
b_sha = diff.b_blob.hexsha[:7] if diff.b_blob else '0000000'
a_path = '{}'.format(diff.a_path) if diff.a_path else '/dev/null'
b_path = '{}'.format(diff.b_path) if diff.b_path else '/dev/null'
if diff.a_path is None:
content = [
'diff --git a/{a_path} b/{b_path}'.format(a_path=b_path, b_path=b_path),
'new file mode 100644',
'index {a_sha}..{b_sha}'.format(a_sha=a_sha, b_sha=b_sha),
'--- {a_path}'.format(a_path=a_path),
'+++ b/{b_path}'.format(b_path=b_path),
]
elif diff.b_path is None:
content = [
'diff --git a/{a_path} b/{b_path}'.format(a_path=a_path, b_path=a_path),
'deleted file mode 100644',
'index {a_sha}..{b_sha}'.format(a_sha=a_sha, b_sha=b_sha),
'--- a/{a_path}'.format(a_path=a_path),
'+++ {b_path}'.format(b_path=b_path),
]
else:
content = [
'diff --git a/{a_path} b/{b_path}'.format(a_path=a_path, b_path=b_path),
'index {a_sha}..{b_sha} 100644'.format(a_sha=a_sha, b_sha=b_sha),
'--- a/{a_path}'.format(a_path=a_path),
'+++ b/{b_path}'.format(b_path=b_path),
]
content.extend(udiff)
return content
def full_diff(repository, branch='master'):
content = []
for diff in repository.commit(branch).tree.diff(None, create_patch=True):
content.extend(unified_format(diff))
return content
def check(check_module, directory, repository, branch, verbosity):
print("Running %s check..." % check_module.__name__)
print('==========' * 8)
problems = check_module.check(
directory=directory,
diff_content=full_diff(repository, branch=branch),
commit={
'message': repository.commit().message
},
verbosity=verbosity,
)
print('==========' * 8)
return not problems
| 33.836066
| 84
| 0.58188
|
29b0d65b94452d636efb5ec501f89da6e1e9d1c4
| 4,493
|
py
|
Python
|
tensorwatch/evaler_utils.py
|
sytelus/longview
|
686e43cf187eaf55df18949359fd63d57dc337b2
|
[
"MIT"
] | null | null | null |
tensorwatch/evaler_utils.py
|
sytelus/longview
|
686e43cf187eaf55df18949359fd63d57dc337b2
|
[
"MIT"
] | null | null | null |
tensorwatch/evaler_utils.py
|
sytelus/longview
|
686e43cf187eaf55df18949359fd63d57dc337b2
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
import math
import random
from . import utils
from .lv_types import ImagePlotItem
from collections import OrderedDict
from itertools import groupby, islice
def skip_mod(mod, g):
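# yield every mod-th item (indexes 0, mod, 2*mod, ...) of iterable g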
for index, item in enumerate(g):
if index % mod == 0:
yield item
# sort keys, group by key, apply val function to each value in group, aggregate values
def groupby2(l, key=lambda x:x, val=lambda x:x, agg=lambda x:x, sort=True):
if sort:
l = sorted(l, key=key)
grp = ((k,v) for k,v in groupby(l, key=key))
valx = ((k, (val(x) for x in v)) for k,v in grp)
aggx = ((k, agg(v)) for k,v in valx)
return aggx
# aggregate weights or biases, use p2v to transform tensor to scaler
def agg_params(model, p2v, weight_or_bias=True):
for i, (n, p) in enumerate(model.named_parameters()):
if p.requires_grad:
is_bias = 'bias' in n
if (weight_or_bias and not is_bias) or (not weight_or_bias and is_bias):
yield i, p2v(p), n
# use this for image to class problems
def pyt_img_class_out_xform(item): # (net_input, target, in_weight, out_weight, net_output, loss)
net_input = item[0].data.cpu().numpy()
# turn log-probabilities in to (max log-probability, class ID)
net_output = torch.max(item[4],0)
# return image, text
return ImagePlotItem((net_input,), title="T:{},Pb:{:.2f},pd:{:.2f},L:{:.2f}".\
format(item[1], math.exp(net_output[0]), net_output[1], item[5]))
# use this for image to image translation problems
def pyt_img_img_out_xform(item): # (net_input, target, in_weight, out_weight, net_output, loss)
net_input = item[0].data.cpu().numpy()
net_output = item[4].data.cpu().numpy()
target = item[1].data.cpu().numpy()
tar_weight = item[3].data.cpu().numpy() if item[3] is not None else None
# return in-image, text, out-image, target-image
return ImagePlotItem((net_input, target, net_output, tar_weight),
title="L:{:.2f}, S:{:.2f}, {:.2f}-{:.2f}, {:.2f}-{:.2f}".\
format(item[5], net_input.std(), net_input.min(), net_input.max(), net_output.min(), net_output.max()))
def cols2rows(batch):
in_weight = utils.fill_like(batch.in_weight, batch.input)
tar_weight = utils.fill_like(batch.tar_weight, batch.input)
losses = [l.mean() for l in batch.loss_all]
targets = [t.item() if len(t.shape)==0 else t for t in batch.target]
return list(zip(batch.input, targets, in_weight, tar_weight,
batch.output, losses))
def top(l, topk=1, order='dsc', group_key=None, out_xform=lambda x:x):
min_result = OrderedDict()
for event_vars in l:
batch = cols2rows(event_vars.batch)
# by default group items in batch by target value
group_key = group_key or (lambda b: b[1]) #target
by_class = groupby2(batch, group_key)
# pick the first values for each class after sorting by loss
reverse, sf, ls_cmp = True, lambda b: b[5], False
if order=='asc':
reverse = False
elif order=='rnd':
ls_cmp, sf = True, lambda t: random.random()
elif order=='dsc':
pass
else:
raise ValueError('order parameter must be dsc, asc or rnd')
# sort grouped objects by sort function then
# take first k values in each group
# create (key, topk-sized list) tuples for each group
s = ((k, list(islice(sorted(v, key=sf, reverse=reverse), topk))) \
for k,v in by_class)
# for each group, maintain global k values for each keys
changed = False
for k,va in s:
# get global k values for this key, if it doesn't exist
# then put current in global min
cur_min = min_result.get(k, None)
if cur_min is None:
min_result[k] = va
changed = True
else:
# for each k value in this group, compare it against the stored entry and keep the better one for the chosen order
for i, (va_k, cur_k) in enumerate(zip(va, cur_min)):
if ls_cmp or (reverse and cur_k[5] < va_k[5]) \
or (not reverse and cur_k[5] > va_k[5]):
cur_min[i] = va[i]
changed = True
if changed:
# flatten each list in dictionary value
yield (out_xform(t) for va in min_result.values() for t in va)
| 40.845455
| 129
| 0.60828
|
7173119a2efce48f3ee3913a4e79f18e38780150
| 15,891
|
py
|
Python
|
sdk/python/pulumi_azure_native/kusto/database_principal_assignment.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/kusto/database_principal_assignment.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/kusto/database_principal_assignment.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from ._enums import *
__all__ = ['DatabasePrincipalAssignmentArgs', 'DatabasePrincipalAssignment']
@pulumi.input_type
class DatabasePrincipalAssignmentArgs:
def __init__(__self__, *,
cluster_name: pulumi.Input[str],
database_name: pulumi.Input[str],
principal_id: pulumi.Input[str],
principal_type: pulumi.Input[Union[str, 'PrincipalType']],
resource_group_name: pulumi.Input[str],
role: pulumi.Input[Union[str, 'DatabasePrincipalRole']],
principal_assignment_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a DatabasePrincipalAssignment resource.
:param pulumi.Input[str] cluster_name: The name of the Kusto cluster.
:param pulumi.Input[str] database_name: The name of the database in the Kusto cluster.
:param pulumi.Input[str] principal_id: The principal ID assigned to the database principal. It can be a user email, application ID, or security group name.
:param pulumi.Input[Union[str, 'PrincipalType']] principal_type: Principal type.
:param pulumi.Input[str] resource_group_name: The name of the resource group containing the Kusto cluster.
:param pulumi.Input[Union[str, 'DatabasePrincipalRole']] role: Database principal role.
:param pulumi.Input[str] principal_assignment_name: The name of the Kusto principalAssignment.
:param pulumi.Input[str] tenant_id: The tenant id of the principal
"""
pulumi.set(__self__, "cluster_name", cluster_name)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "principal_type", principal_type)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "role", role)
if principal_assignment_name is not None:
pulumi.set(__self__, "principal_assignment_name", principal_assignment_name)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> pulumi.Input[str]:
"""
The name of the Kusto cluster.
"""
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: pulumi.Input[str]):
pulumi.set(self, "cluster_name", value)
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> pulumi.Input[str]:
"""
The name of the database in the Kusto cluster.
"""
return pulumi.get(self, "database_name")
@database_name.setter
def database_name(self, value: pulumi.Input[str]):
pulumi.set(self, "database_name", value)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> pulumi.Input[str]:
"""
The principal ID assigned to the database principal. It can be a user email, application ID, or security group name.
"""
return pulumi.get(self, "principal_id")
@principal_id.setter
def principal_id(self, value: pulumi.Input[str]):
pulumi.set(self, "principal_id", value)
@property
@pulumi.getter(name="principalType")
def principal_type(self) -> pulumi.Input[Union[str, 'PrincipalType']]:
"""
Principal type.
"""
return pulumi.get(self, "principal_type")
@principal_type.setter
def principal_type(self, value: pulumi.Input[Union[str, 'PrincipalType']]):
pulumi.set(self, "principal_type", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group containing the Kusto cluster.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def role(self) -> pulumi.Input[Union[str, 'DatabasePrincipalRole']]:
"""
Database principal role.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: pulumi.Input[Union[str, 'DatabasePrincipalRole']]):
pulumi.set(self, "role", value)
@property
@pulumi.getter(name="principalAssignmentName")
def principal_assignment_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Kusto principalAssignment.
"""
return pulumi.get(self, "principal_assignment_name")
@principal_assignment_name.setter
def principal_assignment_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "principal_assignment_name", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
The tenant id of the principal
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
class DatabasePrincipalAssignment(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
database_name: Optional[pulumi.Input[str]] = None,
principal_assignment_name: Optional[pulumi.Input[str]] = None,
principal_id: Optional[pulumi.Input[str]] = None,
principal_type: Optional[pulumi.Input[Union[str, 'PrincipalType']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[Union[str, 'DatabasePrincipalRole']]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Class representing a database principal assignment.
API Version: 2021-01-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cluster_name: The name of the Kusto cluster.
:param pulumi.Input[str] database_name: The name of the database in the Kusto cluster.
:param pulumi.Input[str] principal_assignment_name: The name of the Kusto principalAssignment.
:param pulumi.Input[str] principal_id: The principal ID assigned to the database principal. It can be a user email, application ID, or security group name.
:param pulumi.Input[Union[str, 'PrincipalType']] principal_type: Principal type.
:param pulumi.Input[str] resource_group_name: The name of the resource group containing the Kusto cluster.
:param pulumi.Input[Union[str, 'DatabasePrincipalRole']] role: Database principal role.
:param pulumi.Input[str] tenant_id: The tenant id of the principal
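        Example usage (a minimal sketch; the resource name and property values are
        illustrative placeholders, not taken from this module):
            import pulumi_azure_native as azure_native
            assignment = azure_native.kusto.DatabasePrincipalAssignment("assignment",
                cluster_name="myCluster",
                database_name="myDatabase",
                principal_id="someone@example.com",
                principal_type="User",
                resource_group_name="myResourceGroup",
                role="Viewer")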
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DatabasePrincipalAssignmentArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Class representing a database principal assignment.
API Version: 2021-01-01.
:param str resource_name: The name of the resource.
:param DatabasePrincipalAssignmentArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DatabasePrincipalAssignmentArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
database_name: Optional[pulumi.Input[str]] = None,
principal_assignment_name: Optional[pulumi.Input[str]] = None,
principal_id: Optional[pulumi.Input[str]] = None,
principal_type: Optional[pulumi.Input[Union[str, 'PrincipalType']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[Union[str, 'DatabasePrincipalRole']]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DatabasePrincipalAssignmentArgs.__new__(DatabasePrincipalAssignmentArgs)
if cluster_name is None and not opts.urn:
raise TypeError("Missing required property 'cluster_name'")
__props__.__dict__["cluster_name"] = cluster_name
if database_name is None and not opts.urn:
raise TypeError("Missing required property 'database_name'")
__props__.__dict__["database_name"] = database_name
__props__.__dict__["principal_assignment_name"] = principal_assignment_name
if principal_id is None and not opts.urn:
raise TypeError("Missing required property 'principal_id'")
__props__.__dict__["principal_id"] = principal_id
if principal_type is None and not opts.urn:
raise TypeError("Missing required property 'principal_type'")
__props__.__dict__["principal_type"] = principal_type
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if role is None and not opts.urn:
raise TypeError("Missing required property 'role'")
__props__.__dict__["role"] = role
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["name"] = None
__props__.__dict__["principal_name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["tenant_name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:kusto:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-native:kusto/v20191109:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-nextgen:kusto/v20191109:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-native:kusto/v20200215:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-nextgen:kusto/v20200215:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-native:kusto/v20200614:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-nextgen:kusto/v20200614:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-native:kusto/v20200918:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-nextgen:kusto/v20200918:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-native:kusto/v20210101:DatabasePrincipalAssignment"), pulumi.Alias(type_="azure-nextgen:kusto/v20210101:DatabasePrincipalAssignment")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(DatabasePrincipalAssignment, __self__).__init__(
'azure-native:kusto:DatabasePrincipalAssignment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'DatabasePrincipalAssignment':
"""
Get an existing DatabasePrincipalAssignment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
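        Example (a sketch; the id value is an illustrative placeholder):
            existing = DatabasePrincipalAssignment.get("existing",
                id="<azure resource id of an existing principal assignment>")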
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = DatabasePrincipalAssignmentArgs.__new__(DatabasePrincipalAssignmentArgs)
__props__.__dict__["name"] = None
__props__.__dict__["principal_id"] = None
__props__.__dict__["principal_name"] = None
__props__.__dict__["principal_type"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["role"] = None
__props__.__dict__["tenant_id"] = None
__props__.__dict__["tenant_name"] = None
__props__.__dict__["type"] = None
return DatabasePrincipalAssignment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> pulumi.Output[str]:
"""
The principal ID assigned to the database principal. It can be a user email, application ID, or security group name.
"""
return pulumi.get(self, "principal_id")
@property
@pulumi.getter(name="principalName")
def principal_name(self) -> pulumi.Output[str]:
"""
The principal name
"""
return pulumi.get(self, "principal_name")
@property
@pulumi.getter(name="principalType")
def principal_type(self) -> pulumi.Output[str]:
"""
Principal type.
"""
return pulumi.get(self, "principal_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioned state of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def role(self) -> pulumi.Output[str]:
"""
Database principal role.
"""
return pulumi.get(self, "role")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[Optional[str]]:
"""
The tenant id of the principal
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter(name="tenantName")
def tenant_name(self) -> pulumi.Output[str]:
"""
The tenant name of the principal
"""
return pulumi.get(self, "tenant_name")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
| 45.402857
| 929
| 0.659745
|
1120c8c67967b5e8c126d1e1a865c0af61443bab
| 9,663
|
py
|
Python
|
verification/tests/core/do_test.py
|
hossein1387/pito_riscv
|
94df6f2201798765984017c82d1fdf0355f68d45
|
[
"MIT"
] | 3
|
2021-05-25T08:40:55.000Z
|
2022-01-05T07:05:39.000Z
|
verification/tests/irq/do_test.py
|
hossein1387/pito_riscv
|
94df6f2201798765984017c82d1fdf0355f68d45
|
[
"MIT"
] | 5
|
2021-07-25T01:12:16.000Z
|
2022-02-17T20:30:38.000Z
|
verification/tests/core/do_test.py
|
hossein1387/pito_riscv
|
94df6f2201798765984017c82d1fdf0355f68d45
|
[
"MIT"
] | 3
|
2021-08-11T07:59:07.000Z
|
2022-01-05T07:05:43.000Z
|
#!/usr/bin/env python3
#xelab bank64k_tester -L blk_mem_gen_v8_4_3
import os
import sys
import argparse
import subprocess
import utility as util
#=======================================================================
# Globals
#=======================================================================
simulator = None
result_dir = "../results"
# Use / to indicate that you are deleting a directory and not a file.
# Everything else is interpreted as a file type.
files_to_clean = ["jou", "vcd", "pb", ".Xil/", "xsim.dir/", "log", "wdb", "str"]
#=======================================================================
# Utility Funcs
#=======================================================================
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--simulator', help='Simulator to use', required=False)
parser.add_argument('-f', '--files', help='Simulation files', required=False)
parser.add_argument('-m', '--vlogmacros', help='File containing Verilog global macros', required=False)
parser.add_argument('-l', '--libs', help='File containing list of simulation libraries', required=False)
parser.add_argument('-t', '--top_level', help='Top level module for Xilinx tools', required=False)
parser.add_argument('-g', '--gui', action='store_true', help=' gui mode supported in cadence irun only', required= False)
parser.add_argument('-w', '--waveform', action='store_true', help=' compile with waveform information', required= False)
parser.add_argument('-v', '--svseed', help=' sv seed supported in cadence irun and Xilinx xsim only', required= False)
parser.add_argument('-c', '--coverage', action='store_true', help='add coverage supported in cadence irun only', required= False)
parser.add_argument('-d', '--debug', action='store_true', help='create debug info supported in cadence irun only', required= False)
parser.add_argument('-clean', '--clean', action='store_true', help='clean project', required= False)
parser.add_argument('-silence', '--silence', action='store_true', help=' Silence mode (no log will be printed)', required= False, default=False)
parser.add_argument('-verbosity', '--verbosity', help='Print log verbosity: VERB_NONE, VERB_LOW, VERB_MEDIUM, VERB_HIGH, VERB_FULL, VERB_DEBUG', required=False)
parser.add_argument('-timescale', '--timescale', help='Simulation timescale', required=False, default='1ns/1ps')
parser.add_argument('--firmware', help='firmware file', required=False, default='test.hex')
args = parser.parse_args()
return vars(args)
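# Example invocations (a sketch; file and module names below are hypothetical):
#   ./do_test.py -s xilinx -f rtl_files.f -t core_tb -w --firmware test.hex
#   ./do_test.py -s irun -f rtl_files.f -v 12345 -c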
def get_rtl_files(f_file):
sv_rtl = ""
vhdl_rtl = ""
v_rtl = ""
with open(f_file, 'r') as f:
rtls = f.readlines()
for rtl in rtls:
rtl = rtl.replace("\n", "")
if rtl != "":
if rtl.lower().endswith(".vhdl") or rtl.lower().endswith(".vhd"):
vhdl_rtl += "{0} ".format(rtl)
elif rtl.lower().endswith(".sv") or rtl.lower().endswith(".svh"):
sv_rtl += "{0} ".format(rtl)
elif rtl.lower().endswith(".v") or rtl.lower().endswith(".vh"):
v_rtl += "{0} ".format(rtl)
else:
util.print_log("unsupported file format: {0}".format(rtl), "ERROR", verbosity="VERB_LOW")
sys.exit()
# import ipdb as pdb; pdb.set_trace()
return sv_rtl, v_rtl, vhdl_rtl
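# Illustrative f-file contents for get_rtl_files (paths are hypothetical); each
# non-empty line is sorted by extension into SystemVerilog, Verilog or VHDL:
#   ../rtl/core_pkg.sv
#   ../rtl/alu.v
#   ../rtl/fifo.vhd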
def get_vlogmacros(f_file):
vlogmacros = ""
with open(f_file, 'r') as f:
macros = f.readlines()
for macro in macros:
if macro != "":
macro = macro.replace("\n", "")
vlogmacros += " -d " + macro + " "
return vlogmacros
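# Illustrative macros file for get_vlogmacros (macro names are hypothetical); each
# non-empty line becomes a "-d <MACRO>" switch, e.g. a file containing the single
# line "SIMULATION" yields " -d SIMULATION ".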
def get_libs(f_file):
# import ipdb as pdb; pdb.set_trace()
libs = ""
with open(f_file, 'r') as f:
libslist = f.readlines()
for lib in libslist:
if lib != "":
lib = lib.replace("\n", "")
libs += " -L " + lib + " "
return libs
#=======================================================================
# Main
#=======================================================================
if __name__ == '__main__':
cmd_to_run = ""
args = parse_args()
simulator = args['simulator']
top_level = args['top_level']
files = args['files']
vlogmacros_file = args['vlogmacros']
libs_file = args['libs']
gui = args['gui']
svseed = args['svseed']
coverage = args['coverage']
debug = args['debug']
waveform = args['waveform']
clean = args['clean']
silence = args['silence']
verbosity = args['verbosity']
if verbosity is None:
verbosity = 'VERB_LOW'
if util.get_platform(verbosity=verbosity) != "linux":
util.print_log("This script works only on a Linux platform", "ERROR", verbosity="VERB_LOW")
sys.exit()
if clean:
util.print_banner("Cleaning project", verbosity=verbosity)
util.clean_proj(files_to_clean)
if not os.path.exists(result_dir):
util.print_log("Creating a result directory in {0}".format(result_dir), "INFO", verbosity="VERB_LOW")
os.makedirs(result_dir)
if simulator == None:
util.print_log("You need to provide Simulator name", "ERROR", verbosity="VERB_LOW")
sys.exit()
# Load Verilog macros file, if specified
vlogmacros = ""
if vlogmacros_file is not None:
if os.path.exists(vlogmacros_file):
vlogmacros = get_vlogmacros(vlogmacros_file)
else:
util.print_log("Verilog macros file not found!", "ERROR", verbosity="VERB_LOW")
sys.exit()
# Load list of simulation libraries from file, if specified
libs = ""
if libs_file is not None:
if os.path.exists(libs_file):
libs = get_libs(libs_file)
else:
util.print_log("Library list file not found!", "ERROR", verbosity="VERB_LOW")
sys.exit()
if simulator.lower() == "xilinx":
        # For Xilinx tools we need to specify the top level for creating snapshots, which are
        # needed by the simulator and synthesis tools
if not('XILINX_VIVADO' in os.environ):
util.print_log("Xilinx Vivado simulator was not found, forgot to source it?", "ERROR", verbosity="VERB_LOW")
sys.exit()
if top_level == None:
util.print_log("Top level was not specified", "ERROR", verbosity="VERB_LOW")
sys.exit()
util.print_banner("Compiling input files", verbosity=verbosity)
if files == None:
util.print_log("You need to provide f-file", "ERROR", verbosity="VERB_LOW")
sys.exit()
sv_rtl, v_rtl, vhdl_rtl = get_rtl_files(files)
# import ipdb as pdb; pdb.set_trace()
if sv_rtl != "":
cmd_to_run = "xvlog --sv {0} ".format(sv_rtl)
cmd_to_run += vlogmacros
if silence:
cmd_to_run += "> /dev/null"
util.run_command(cmd_to_run, split=False, verbosity=verbosity)
if v_rtl != "":
cmd_to_run = "xvlog {0} ".format(v_rtl)
cmd_to_run += vlogmacros
if silence:
cmd_to_run += "> /dev/null"
util.run_command(cmd_to_run, split=False, verbosity=verbosity)
if vhdl_rtl != "":
cmd_to_run = "xvhdl {0} ".format(vhdl_rtl)
if silence:
cmd_to_run += "> /dev/null"
util.run_command(cmd_to_run, split=False, verbosity=verbosity)
util.print_banner("Creating snapshot", verbosity=verbosity)
# cmd_to_run = "xelab {0} ".format(top_level)
# import ipdb as pdb; pdb.set_trace()
cmd_to_run = "xelab -debug typical -L secureip -L unisims_ver -L unimacro_ver {0} ".format(top_level)
if libs_file:
cmd_to_run += libs
if waveform:
cmd_to_run += " --debug all "
if silence:
cmd_to_run += "> /dev/null"
if args['timescale'] != None:
cmd_to_run += "--timescale {} ".format(args['timescale'])
util.run_command(cmd_to_run, split=False, verbosity=verbosity)
util.print_banner("Running simulation", verbosity=verbosity)
if gui:
cmd_to_run = "xsim --g {0} ".format(top_level)
else:
cmd_to_run = "xsim -R {0} ".format(top_level)
if svseed:
cmd_to_run += "-sv_seed {0} ".format(svseed)
if args['firmware'] != None:
cmd_to_run += "-testplusarg firmware={} ".format(args['firmware'])
if silence:
cmd_to_run += "> /dev/null"
util.run_command(cmd_to_run, split=False, verbosity=verbosity)
elif simulator.lower() == "iverilog":
util.print_banner("Running iverilog Simulation", verbosity=verbosity)
cmd_to_run = "iverilog -Wall -g2012 -f {0} && unbuffer vvp {1}/result.out".format(files, result_dir)
util.run_command(cmd_to_run, split=False, verbosity=verbosity)
elif simulator.lower() == "irun":
iruns_args = ""
util.print_banner("Running Cadence irun Simulation", verbosity=verbosity)
if gui:
iruns_args += "gui "
if svseed:
iruns_args += "svseed {0} ".format(svseed)
if coverage:
iruns_args += "coverage "
if debug:
iruns_args += "debug "
cmd_to_run = "irun +access+rwc -f {0} {1}".format(files, iruns_args)
util.run_command(cmd_to_run, verbosity=verbosity)
else:
util.print_log("Unknown simulator {0}".format(simulator), "ERROR", verbosity="VERB_LOW")
sys.exit()
| 44.325688
| 164
| 0.579634
|
dd256a0c7c975bd65f8c3aeda6c144dcf5ffa430
| 125,008
|
py
|
Python
|
tests/system_tests_edge_router.py
|
nicob87/qpid-dispatch
|
f62c85f40bbd77776d0bddea66938fe9319deaa5
|
[
"Apache-2.0"
] | null | null | null |
tests/system_tests_edge_router.py
|
nicob87/qpid-dispatch
|
f62c85f40bbd77776d0bddea66938fe9319deaa5
|
[
"Apache-2.0"
] | 3
|
2019-09-30T03:11:04.000Z
|
2020-03-06T17:15:54.000Z
|
tests/system_tests_edge_router.py
|
irinabov/debian-qpid-dispatch
|
42fb2ffb65f8e8c8d616633c0b4308d6531a281d
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from time import sleep
from threading import Event
from threading import Timer
from proton import Message, Timeout
from system_test import TestCase, Qdrouterd, main_module, TIMEOUT, MgmtMsgProxy
from system_test import AsyncTestReceiver
from system_test import AsyncTestSender
from system_test import Logger
from system_test import QdManager
from system_test import unittest
from system_tests_link_routes import ConnLinkRouteService
from test_broker import FakeService
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
from proton.utils import BlockingConnection
from qpid_dispatch.management.client import Node
from qpid_dispatch_internal.tools.command import version_supports_mutually_exclusive_arguments
from subprocess import PIPE, STDOUT
import re
class AddrTimer(object):
def __init__(self, parent):
self.parent = parent
def on_timer_task(self, event):
self.parent.check_address()
class EdgeRouterTest(TestCase):
inter_router_port = None
@classmethod
def setUpClass(cls):
"""Start a router"""
super(EdgeRouterTest, cls).setUpClass()
def router(name, mode, connection, extra=None):
config = [
('router', {'mode': mode, 'id': name}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'in', 'containerId': 'LRC'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'out', 'containerId': 'LRC'}),
('autoLink', {'address': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'in'}),
('autoLink', {'address': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'out'}),
('address', {'prefix': 'closest', 'distribution': 'closest'}),
('address', {'prefix': 'spread', 'distribution': 'balanced'}),
('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
('address', {'prefix': '0.0.0.0/queue', 'waypoint': 'yes'}),
connection
]
if extra:
config.append(extra)
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
edge_port_A = cls.tester.get_port()
edge_port_B = cls.tester.get_port()
router('INT.A', 'interior', ('listener', {'role': 'inter-router', 'port': inter_router_port}),
('listener', {'role': 'edge', 'port': edge_port_A}))
router('INT.B', 'interior', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port}),
('listener', {'role': 'edge', 'port': edge_port_B}))
router('EA1', 'edge', ('connector', {'name': 'edge', 'role': 'edge',
'port': edge_port_A}
),
('connector', {'name': 'edge.1', 'role': 'edge',
'port': edge_port_B}
)
)
cls.routers[0].wait_router_connected('INT.B')
cls.routers[1].wait_router_connected('INT.A')
# 1 means skip that test.
cls.skip = { 'test_01' : 0
}
def __init__(self, test_method):
TestCase.__init__(self, test_method)
self.success = False
self.timer_delay = 2
self.max_attempts = 3
self.attempts = 0
def run_qdstat(self, args, regexp=None, address=None):
p = self.popen(
['qdstat', '--bus', str(address or self.router.addresses[0]),
'--timeout', str(TIMEOUT)] + args,
name='qdstat-' + self.id(), stdout=PIPE, expect=None,
universal_newlines=True)
out = p.communicate()[0]
assert p.returncode == 0, \
"qdstat exit status %s, output:\n%s" % (p.returncode, out)
if regexp: assert re.search(regexp, out,
re.I), "Can't find '%s' in '%s'" % (
regexp, out)
return out
def can_terminate(self):
if self.attempts == self.max_attempts:
return True
if self.success:
return True
return False
def run_int_b_edge_qdstat(self):
outs = self.run_qdstat(['--edge'],
address=self.routers[2].addresses[0])
lines = outs.split("\n")
for line in lines:
if "INT.B" in line and "yes" in line:
self.success = True
def run_int_a_edge_qdstat(self):
outs = self.run_qdstat(['--edge'],
address=self.routers[2].addresses[0])
lines = outs.split("\n")
for line in lines:
if "INT.A" in line and "yes" in line:
self.success = True
def schedule_int_a_qdstat_test(self):
if self.attempts < self.max_attempts:
if not self.success:
Timer(self.timer_delay, self.run_int_a_edge_qdstat).start()
self.attempts += 1
def schedule_int_b_qdstat_test(self):
if self.attempts < self.max_attempts:
if not self.success:
Timer(self.timer_delay, self.run_int_b_edge_qdstat).start()
self.attempts += 1
def test_01_active_flag(self):
"""
In this test, we have one edge router connected to two interior
routers. One connection is to INT.A and another connection is to
        INT.B. But only one of these connections is active. We use qdstat
to make sure that only one of these connections is active.
Then we kill the router with the active connection and make sure
that the other connection is now the active one
"""
if self.skip [ 'test_01' ] :
self.skipTest ( "Test skipped during development." )
success = False
outs = self.run_qdstat(['--edge'],
address=self.routers[0].addresses[0])
lines = outs.split("\n")
for line in lines:
if "EA1" in line and "yes" in line:
success = True
if not success:
self.fail("Active edge connection not found not found for "
"interior router")
outs = self.run_qdstat(['--edge'],
address=self.routers[2].addresses[0])
conn_map_edge = dict()
#
        # We don't know which interior router the edge will connect to.
#
conn_map_edge["INT.A"] = False
conn_map_edge["INT.B"] = False
lines = outs.split("\n")
for line in lines:
if "INT.A" in line and "yes" in line:
conn_map_edge["INT.A"] = True
if "INT.B" in line and "yes" in line:
conn_map_edge["INT.B"] = True
if conn_map_edge["INT.A"] and conn_map_edge["INT.B"]:
self.fail("Edhe router has two active connections to interior "
"routers. Should have only one")
if not conn_map_edge["INT.A"] and not conn_map_edge["INT.B"]:
self.fail("There are no active aconnections to interior routers")
if conn_map_edge["INT.A"]:
#
# INT.A has the active connection. Let's kill INT.A and see
# if the other connection becomes active
#
EdgeRouterTest.routers[0].teardown()
self.schedule_int_b_qdstat_test()
while not self.can_terminate():
pass
self.assertTrue(self.success)
elif conn_map_edge["INT.B"]:
#
# INT.B has the active connection. Let's kill INT.B and see
# if the other connection becomes active
#
EdgeRouterTest.routers[1].teardown()
self.schedule_int_a_qdstat_test()
while not self.can_terminate():
pass
self.assertTrue(self.success)
class RouterTest(TestCase):
inter_router_port = None
@classmethod
def setUpClass(cls):
"""Start a router"""
super(RouterTest, cls).setUpClass()
def router(name, mode, connection, extra=None):
config = [
('router', {'mode': mode, 'id': name}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'in', 'containerId': 'LRC'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'out', 'containerId': 'LRC'}),
('autoLink', {'address': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'in'}),
('autoLink', {'address': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'out'}),
('address', {'prefix': 'closest', 'distribution': 'closest'}),
('address', {'prefix': 'spread', 'distribution': 'balanced'}),
('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
('address', {'prefix': '0.0.0.0/queue', 'waypoint': 'yes'}),
connection
]
if extra:
config.append(extra)
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
edge_port_A = cls.tester.get_port()
edge_port_B = cls.tester.get_port()
router('INT.A', 'interior', ('listener', {'role': 'inter-router', 'port': inter_router_port}),
('listener', {'role': 'edge', 'port': edge_port_A}))
router('INT.B', 'interior', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port}),
('listener', {'role': 'edge', 'port': edge_port_B}))
router('EA1', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_A}))
router('EA2', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_A}))
router('EB1', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_B}))
router('EB2', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_B}))
cls.routers[0].wait_router_connected('INT.B')
cls.routers[1].wait_router_connected('INT.A')
cls.skip = { 'test_01' : 0,
'test_02' : 0,
'test_03' : 0,
'test_04' : 0,
'test_05' : 0,
'test_06' : 0,
'test_07' : 0,
'test_08' : 0,
'test_09' : 0,
'test_10' : 0,
'test_11' : 0,
'test_12' : 0,
'test_13' : 0,
'test_14' : 0,
'test_15' : 0,
'test_16' : 0,
'test_17' : 0,
'test_18' : 0,
'test_19' : 0,
'test_20' : 0,
'test_21' : 0,
'test_22' : 0,
'test_23' : 0,
'test_24' : 0,
'test_25' : 0,
'test_26' : 0,
'test_27' : 0,
'test_28' : 0,
'test_29' : 0,
'test_30' : 0,
'test_31' : 0,
'test_32' : 0,
'test_33' : 0,
'test_34' : 0,
'test_35' : 0,
'test_36' : 0,
'test_37' : 0,
'test_38' : 0,
'test_39' : 0,
'test_40' : 0,
'test_41' : 0,
'test_42' : 0,
'test_43': 0,
'test_44': 0,
'test_45': 0,
'test_46': 0,
'test_47': 0,
'test_48': 0,
'test_49': 0,
'test_50': 0,
'test_51': 0,
'test_52': 0,
'test_53': 0,
'test_54': 0,
'test_55': 0,
'test_56': 0,
'test_57': 0,
'test_58': 0,
'test_59': 0,
'test_60': 0,
'test_61': 0,
'test_62': 0,
'test_63': 0,
'test_64': 0,
'test_65': 0,
'test_66': 0,
'test_67': 0,
'test_68': 0,
'test_69': 0,
'test_70': 0,
'test_71': 0,
'test_72': 0,
'test_73': 0
}
def test_01_connectivity_INTA_EA1(self):
if self.skip [ 'test_01' ] :
self.skipTest ( "Test skipped during development." )
test = ConnectivityTest(self.routers[0].addresses[0],
self.routers[2].addresses[0],
'EA1')
test.run()
self.assertEqual(None, test.error)
def test_02_connectivity_INTA_EA2(self):
if self.skip [ 'test_02' ] :
self.skipTest ( "Test skipped during development." )
test = ConnectivityTest(self.routers[0].addresses[0],
self.routers[3].addresses[0],
'EA2')
test.run()
self.assertEqual(None, test.error)
def test_03_connectivity_INTB_EB1(self):
if self.skip [ 'test_03' ] :
self.skipTest ( "Test skipped during development." )
test = ConnectivityTest(self.routers[1].addresses[0],
self.routers[4].addresses[0],
'EB1')
test.run()
self.assertEqual(None, test.error)
def test_04_connectivity_INTB_EB2(self):
if self.skip [ 'test_04' ] :
self.skipTest ( "Test skipped during development." )
test = ConnectivityTest(self.routers[1].addresses[0],
self.routers[5].addresses[0],
'EB2')
test.run()
self.assertEqual(None, test.error)
def test_05_dynamic_address_same_edge(self):
if self.skip [ 'test_05' ] :
self.skipTest ( "Test skipped during development." )
test = DynamicAddressTest(self.routers[2].addresses[0],
self.routers[2].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_06_dynamic_address_interior_to_edge(self):
if self.skip [ 'test_06' ] :
self.skipTest ( "Test skipped during development." )
test = DynamicAddressTest(self.routers[2].addresses[0],
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_07_dynamic_address_edge_to_interior(self):
if self.skip [ 'test_07' ] :
self.skipTest ( "Test skipped during development." )
test = DynamicAddressTest(self.routers[0].addresses[0],
self.routers[2].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_08_dynamic_address_edge_to_edge_one_interior(self):
if self.skip [ 'test_08' ] :
self.skipTest ( "Test skipped during development." )
test = DynamicAddressTest(self.routers[2].addresses[0],
self.routers[3].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_09_dynamic_address_edge_to_edge_two_interior(self):
if self.skip [ 'test_09' ] :
self.skipTest ( "Test skipped during development." )
test = DynamicAddressTest(self.routers[2].addresses[0],
self.routers[4].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_10_mobile_address_same_edge(self):
if self.skip [ 'test_10' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
"test_10")
test.run()
self.assertEqual(None, test.error)
def test_11_mobile_address_interior_to_edge(self):
if self.skip [ 'test_11' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressTest(self.routers[2].addresses[0],
self.routers[0].addresses[0],
"test_11")
test.run()
self.assertEqual(None, test.error)
def test_12_mobile_address_edge_to_interior(self):
if self.skip [ 'test_12' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressTest(self.routers[0].addresses[0],
self.routers[2].addresses[0],
"test_12")
test.run()
if test.error is not None:
test.logger.dump()
self.assertEqual(None, test.error)
def test_13_mobile_address_edge_to_edge_one_interior(self):
if self.skip [ 'test_13' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
"test_13")
test.run()
self.assertEqual(None, test.error)
def test_14_mobile_address_edge_to_edge_two_interior(self):
if self.skip [ 'test_14' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
"test_14")
test.run()
self.assertEqual(None, test.error)
# One sender two receiver tests.
# One sender and two receivers on the same edge
def test_15_mobile_address_same_edge(self):
if self.skip [ 'test_15' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"test_15")
test.run()
self.assertEqual(None, test.error)
    # One sender and two receivers on different edges. The edges are
# hanging off the same interior router.
def test_16_mobile_address_edge_to_another_edge_same_interior(self):
if self.skip [ 'test_16' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
"test_16")
test.run()
self.assertEqual(None, test.error)
# Two receivers on the interior and sender on the edge
def test_17_mobile_address_edge_to_interior(self):
if self.skip [ 'test_17' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
self.routers[2].addresses[0],
"test_17")
test.run()
self.assertEqual(None, test.error)
# Two receivers on the edge and the sender on the interior
def test_18_mobile_address_interior_to_edge(self):
if self.skip [ 'test_18' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[0].addresses[0],
"test_18")
test.run()
self.assertEqual(None, test.error)
# Two receivers on the edge and the sender on the 'other' interior
def test_19_mobile_address_other_interior_to_edge(self):
if self.skip [ 'test_19' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[1].addresses[0],
"test_19")
test.run()
self.assertEqual(None, test.error)
# Two receivers on the edge and the sender on the edge of
# the 'other' interior
def test_20_mobile_address_edge_to_edge_two_interiors(self):
if self.skip [ 'test_20' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[5].addresses[0],
"test_20")
test.run()
self.assertEqual(None, test.error)
    # One receiver on an edge, another one on an interior, and the sender
    # is on the edge of another interior
def test_21_mobile_address_edge_interior_receivers(self):
if self.skip [ 'test_21' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[4].addresses[0],
self.routers[1].addresses[0],
self.routers[2].addresses[0],
"test_21")
test.run()
self.assertEqual(None, test.error)
    # Two receivers, one on each interior router, and an edge sender
    # connected to the first interior
def test_22_mobile_address_edge_sender_two_interior_receivers(self):
if self.skip [ 'test_22' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
self.routers[3].addresses[0],
"test_22")
test.run()
self.assertEqual(None, test.error)
def test_23_mobile_address_edge_sender_two_edge_receivers(self):
if self.skip [ 'test_23' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressOneSenderTwoReceiversTest(self.routers[4].addresses[0],
self.routers[5].addresses[0],
self.routers[2].addresses[0],
"test_23")
test.run()
self.assertEqual(None, test.error)
# 1 Sender and 3 receivers all on the same edge
def test_24_multicast_mobile_address_same_edge(self):
if self.skip [ 'test_24' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.24")
test.run()
self.assertEqual(None, test.error)
# 1 Sender and receiver on one edge and 2 receivers on another edge
# all in the same interior
def test_25_multicast_mobile_address_different_edges_same_interior(self):
if self.skip [ 'test_25' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[3].addresses[0],
"multicast.25",
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
# Two receivers on each edge, one receiver on interior and sender
# on the edge
def test_26_multicast_mobile_address_edge_to_interior(self):
if self.skip [ 'test_26' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
self.routers[2].addresses[0],
"multicast.26",
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on the interior
def test_27_multicast_mobile_address_interior_to_edge(self):
if self.skip [ 'test_27' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
"multicast.27",
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on an interior that is not connected
# to the edges.
def test_28_multicast_mobile_address_other_interior_to_edge(self):
if self.skip [ 'test_28' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[1].addresses[0],
"multicast.28")
test.run()
self.assertEqual(None, test.error)
# Sender on an interior and 3 receivers connected to three different edges
def test_29_multicast_mobile_address_edge_to_edge_two_interiors(self):
if self.skip [ 'test_29' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[0].addresses[0],
"multicast.29")
test.run()
self.assertEqual(None, test.error)
def test_30_multicast_mobile_address_all_edges(self):
if self.skip [ 'test_30' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[5].addresses[0],
"multicast.30",
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
######### Multicast Large message tests ######################
# 1 Sender and 3 receivers all on the same edge
def test_31_multicast_mobile_address_same_edge(self):
if self.skip [ 'test_31' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.31", large_msg=True)
test.run()
self.assertEqual(None, test.error)
# 1 Sender on one edge and 3 receivers on another edge all in the same
# interior
def test_32_multicast_mobile_address_different_edges_same_interior(self):
if self.skip [ 'test_32' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[3].addresses[0],
"multicast.32",
self.routers[0].addresses[0],
large_msg=True)
test.run()
self.assertEqual(None, test.error)
# Two receivers on each edge, one receiver on interior and sender
# on the edge
def test_33_multicast_mobile_address_edge_to_interior(self):
if self.skip [ 'test_33' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
self.routers[2].addresses[0],
"multicast.33", large_msg=True)
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on the interior
def test_34_multicast_mobile_address_interior_to_edge(self):
if self.skip [ 'test_34' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
"multicast.34", large_msg=True)
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on an interior that is not connected
# to the edges.
def test_35_multicast_mobile_address_other_interior_to_edge(self):
if self.skip [ 'test_35' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[1].addresses[0],
"multicast.35",
self.routers[0].addresses[0],
large_msg=True)
test.run()
self.assertEqual(None, test.error)
# Sender on an interior and 3 receivers connected to three different edges
def test_36_multicast_mobile_address_edge_to_edge_two_interiors(self):
if self.skip [ 'test_36' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[0].addresses[0],
"multicast.36", large_msg=True)
test.run()
self.assertEqual(None, test.error)
def test_37_multicast_mobile_address_all_edges(self):
if self.skip [ 'test_37' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[5].addresses[0],
"multicast.37",
self.routers[0].addresses[0],
large_msg=True)
test.run()
self.assertEqual(None, test.error)
def test_38_mobile_addr_event_three_receivers_same_interior(self):
if self.skip [ 'test_38' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressEventTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[3].addresses[0],
self.routers[2].addresses[0],
self.routers[0].addresses[0],
"test_38")
test.run()
self.assertEqual(None, test.error)
def test_39_mobile_addr_event_three_receivers_diff_interior(self):
if self.skip [ 'test_39' ] :
self.skipTest ( "Test skipped during development." )
# This will test the QDRC_EVENT_ADDR_TWO_DEST event
test = MobileAddressEventTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
self.routers[5].addresses[0],
self.routers[2].addresses[0],
self.routers[0].addresses[0],
"test_39")
test.run()
self.assertEqual(None, test.error)
def test_40_drop_rx_client_multicast_large_message(self):
if self.skip [ 'test_40' ] :
self.skipTest ( "Test skipped during development." )
# test what happens if some multicast receivers close in the middle of
# a multiframe transfer
test = MobileAddrMcastDroppedRxTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.40")
test.run()
self.assertEqual(None, test.error)
def test_41_drop_rx_client_multicast_small_message(self):
if self.skip [ 'test_41' ] :
self.skipTest ( "Test skipped during development." )
# test what happens if some multicast receivers close in the middle of
# a multiframe transfer
test = MobileAddrMcastDroppedRxTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.40",large_msg=False)
test.run()
self.assertEqual(None, test.error)
def test_42_anon_sender_mobile_address_same_edge(self):
if self.skip [ 'test_42' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
"test_42")
test.run()
self.assertEqual(None, test.error)
def test_43_anon_sender_mobile_address_interior_to_edge(self):
if self.skip [ 'test_43' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[0].addresses[0],
"test_43")
test.run()
self.assertEqual(None, test.error)
def test_44_anon_sender_mobile_address_edge_to_interior(self):
if self.skip [ 'test_44' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[0].addresses[0],
self.routers[2].addresses[0],
"test_44")
test.run()
self.assertEqual(None, test.error)
def test_45_anon_sender_mobile_address_edge_to_edge_one_interior(self):
if self.skip [ 'test_45' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
"test_45")
test.run()
self.assertEqual(None, test.error)
def test_46_anon_sender_mobile_address_edge_to_edge_two_interior(self):
if self.skip [ 'test_46' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
"test_46")
test.run()
self.assertEqual(None, test.error)
def test_47_anon_sender_mobile_address_large_msg_same_edge(self):
if self.skip [ 'test_47' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
"test_47", True)
test.run()
self.assertEqual(None, test.error)
def test_48_anon_sender_mobile_address_large_msg_interior_to_edge(self):
if self.skip [ 'test_48' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[0].addresses[0],
"test_48", True)
test.run()
self.assertEqual(None, test.error)
def test_49_anon_sender_mobile_address_large_msg_edge_to_interior(self):
if self.skip [ 'test_49' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[0].addresses[0],
self.routers[2].addresses[0],
"test_49", True)
test.run()
self.assertEqual(None, test.error)
def test_50_anon_sender_mobile_address_large_msg_edge_to_edge_one_interior(self):
if self.skip [ 'test_50' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
"test_50", True)
test.run()
self.assertEqual(None, test.error)
def test_51_anon_sender_mobile_address_large_msg_edge_to_edge_two_interior(self):
if self.skip [ 'test_51' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressAnonymousTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
"test_51", True)
test.run()
self.assertEqual(None, test.error)
# 1 Sender and 3 receivers all on the same edge
def test_52_anon_sender_multicast_mobile_address_same_edge(self):
if self.skip [ 'test_52' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.52", anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# 1 Sender and receiver on one edge and 2 receivers on another edge
# all in the same interior
def test_53_anon_sender_multicast_mobile_address_different_edges_same_interior(self):
if self.skip [ 'test_53' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[3].addresses[0],
"multicast.53",
self.routers[0].addresses[0],
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Two receivers on each edge, one receiver on interior and sender
# on the edge
def test_54_anon_sender_multicast_mobile_address_edge_to_interior(self):
if self.skip [ 'test_54' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
self.routers[2].addresses[0],
"multicast.54",
self.routers[0].addresses[0],
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on the interior
def test_55_anon_sender_multicast_mobile_address_interior_to_edge(self):
if self.skip [ 'test_55' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
"multicast.55",
self.routers[0].addresses[0],
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on an interior that is not connected
# to the edges.
def test_56_anon_sender_multicast_mobile_address_other_interior_to_edge(self):
if self.skip [ 'test_56' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[1].addresses[0],
"multicast.56",
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Sender on an interior and 3 receivers connected to three different edges
def test_57_anon_sender_multicast_mobile_address_edge_to_edge_two_interiors(self):
if self.skip [ 'test_57' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[0].addresses[0],
"multicast.57",
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
def test_58_anon_sender_multicast_mobile_address_all_edges(self):
if self.skip [ 'test_58' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[5].addresses[0],
"multicast.58",
self.routers[0].addresses[0],
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
######### Multicast Large message anon sender tests ####################
# 1 Sender and 3 receivers all on the same edge
def test_59_anon_sender__multicast_mobile_address_same_edge(self):
if self.skip [ 'test_59' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.59",
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# 1 Sender on one edge and 3 receivers on another edge all in the same
# interior
def test_60_anon_sender_multicast_mobile_address_different_edges_same_interior(self):
if self.skip [ 'test_60' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[3].addresses[0],
"multicast.60",
self.routers[0].addresses[0],
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Two receivers on each edge, one receiver on interior and sender
# on the edge
def test_61_anon_sender_multicast_mobile_address_edge_to_interior(self):
if self.skip [ 'test_61' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
self.routers[2].addresses[0],
"multicast.61",
self.routers[3].addresses[0],
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on the interior
def test_62_anon_sender_multicast_mobile_address_interior_to_edge(self):
if self.skip [ 'test_62' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[0].addresses[0],
"multicast.62",
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Receivers on the edge and sender on an interior that is not connected
# to the edges.
def test_63_anon_sender_multicast_mobile_address_other_interior_to_edge(self):
if self.skip [ 'test_63' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[1].addresses[0],
"multicast.63",
self.routers[0].addresses[0],
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
# Sender on an interior and 3 receivers connected to three different edges
def test_64_anon_sender_multicast_mobile_address_edge_to_edge_two_interiors(self):
if self.skip [ 'test_64' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[0].addresses[0],
"multicast.64",
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
def test_65_anon_sender_multicast_mobile_address_all_edges(self):
if self.skip [ 'test_65' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddressMulticastTest(self.routers[2].addresses[0],
self.routers[3].addresses[0],
self.routers[4].addresses[0],
self.routers[5].addresses[0],
"multicast.65",
self.routers[0].addresses[0],
large_msg=True,
anon_sender=True)
test.run()
self.assertEqual(None, test.error)
def test_66_anon_sender_drop_rx_client_multicast_large_message(self):
# test what happens if some multicast receivers close in the middle of
# a multiframe transfer. The sender is an anonymous sender.
if self.skip [ 'test_66' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddrMcastAnonSenderDroppedRxTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.66")
test.run()
self.assertEqual(None, test.error)
def test_67_drop_rx_client_multicast_small_message(self):
# test what happens if some multicast receivers close in the middle of
# a multiframe transfer. The sender is an anonymous sender.
if self.skip [ 'test_67' ] :
self.skipTest ( "Test skipped during development." )
test = MobileAddrMcastAnonSenderDroppedRxTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
"multicast.67",
large_msg=False)
test.run()
self.assertEqual(None, test.error)
def run_qdstat(self, args, regexp=None, address=None):
if args:
popen_arg = ['qdstat', '--bus', str(address or self.router.addresses[0]),
'--timeout', str(TIMEOUT)] + args
else:
popen_arg = ['qdstat', '--bus',
str(address or self.router.addresses[0]),
'--timeout', str(TIMEOUT)]
p = self.popen(popen_arg,
name='qdstat-' + self.id(), stdout=PIPE, expect=None,
universal_newlines=True)
out = p.communicate()[0]
assert p.returncode == 0, \
"qdstat exit status %s, output:\n%s" % (p.returncode, out)
if regexp: assert re.search(regexp, out,
re.I), "Can't find '%s' in '%s'" % (
regexp, out)
return out
def test_68_edge_qdstat_all_routers(self):
# Connects to an edge router and runs "qdstat --all-routers"
# "qdstat --all-routers" is same as "qdstat --all-routers --g"
# Connecting to an edge router and running "qdstat --all-routers""will only yield the
# summary statostics of the edge router. It will not show statistics of the interior routers.
outs = self.run_qdstat(['--all-routers'],
address=self.routers[2].addresses[0])
self.assertTrue("Router Id EA1" in outs)
outs = self.run_qdstat(['--all-routers', '--all-entities'],
address=self.routers[2].addresses[0])
# Check if each entity section is showing
self.assertTrue("Router Links" in outs)
self.assertTrue("Router Addresses" in outs)
self.assertTrue("Connections" in outs)
self.assertTrue("AutoLinks" in outs)
self.assertTrue("Auto Links" in outs)
self.assertEqual(outs.count("Link Routes"), 2)
self.assertTrue("Router Statistics" in outs)
self.assertTrue("Router Id EA1" in outs)
self.assertTrue("Types" in outs)
outs = self.run_qdstat(['-c', '--all-routers'],
address=self.routers[2].addresses[0])
        # Verify that the edge uplink connection is showing
self.assertTrue("INT.A" in outs)
self.assertTrue("inter-router" not in outs)
outs = self.run_qdstat(['--all-entities'],
address=self.routers[2].addresses[0])
# Check if each entity section is showing
self.assertTrue("Router Links" in outs)
self.assertTrue("Router Addresses" in outs)
self.assertTrue("Connections" in outs)
self.assertTrue("AutoLinks" in outs)
self.assertTrue("Auto Links" in outs)
self.assertEqual(outs.count("Link Routes"), 2)
self.assertTrue("Router Statistics" in outs)
self.assertTrue("Router Id EA1" in outs)
self.assertTrue("Types" in outs)
def test_69_interior_qdstat_all_routers(self):
# Connects to an interior router and runs "qdstat --all-routers"
# "qdstat --all-routers" is same as "qdstat --all-routers --all-entities"
# Connecting to an interior router and running "qdstat --all-routers""will yield the
# summary statostics of all the interior routers.
outs = self.run_qdstat(['--all-routers'],
address=self.routers[0].addresses[0])
self.assertEqual(outs.count("Router Statistics"), 2)
outs = self.run_qdstat(['--all-routers', '-nv'],
address=self.routers[0].addresses[0])
        # 5 occurrences including section headers
self.assertEqual(outs.count("INT.A"), 5)
self.assertEqual(outs.count("INT.B"), 5)
outs = self.run_qdstat(['--all-routers', '--all-entities'],
address=self.routers[0].addresses[0])
self.assertEqual(outs.count("Router Links"), 2)
self.assertEqual(outs.count("Router Addresses"), 2)
self.assertEqual(outs.count("Connections"), 4)
self.assertEqual(outs.count("AutoLinks"), 2)
self.assertEqual(outs.count("Auto Links"), 2)
self.assertEqual(outs.count("Link Routes"), 4)
self.assertEqual(outs.count("Router Statistics"), 2)
self.assertEqual(outs.count("Types"), 2)
outs = self.run_qdstat(['--all-routers', '-nv'],
address=self.routers[0].addresses[0])
        # 5 occurrences including section headers
self.assertEqual(outs.count("INT.A"), 5)
self.assertEqual(outs.count("INT.B"), 5)
outs = self.run_qdstat(['-c', '--all-routers'],
address=self.routers[0].addresses[0])
self.assertEqual(outs.count("INT.A"), 2)
self.assertEqual(outs.count("INT.B"), 2)
outs = self.run_qdstat(['-l', '--all-routers'],
address=self.routers[0].addresses[0])
# Two edge-downlinks from each interior to the two edges, 4 in total.
self.assertEqual(outs.count("edge-downlink"), 4)
# Gets all entity information of the interior router
outs = self.run_qdstat(['--all-entities'],
address=self.routers[0].addresses[0])
self.assertEqual(outs.count("Router Links"), 1)
self.assertEqual(outs.count("Router Addresses"), 1)
self.assertEqual(outs.count("AutoLinks"), 1)
self.assertEqual(outs.count("Auto Links"), 1)
self.assertEqual(outs.count("Router Statistics"), 1)
self.assertEqual(outs.count("Link Routes"), 2)
if version_supports_mutually_exclusive_arguments():
has_error = False
try:
# You cannot combine --all-entities with -c
outs = self.run_qdstat(['-c', '--all-entities'],
address=self.routers[0].addresses[0])
except Exception as e:
if "error: argument --all-entities: not allowed with argument -c/--connections" in str(e):
has_error=True
self.assertTrue(has_error)
has_error = False
try:
outs = self.run_qdstat(['-r', 'INT.A', '--all-routers'],
address=self.routers[0].addresses[0])
except Exception as e:
if "error: argument --all-routers: not allowed with argument -r/--router" in str(e):
has_error=True
self.assertTrue(has_error)
def test_70_qdstat_edge_router_option(self):
# Tests the --edge-router (-d) option of qdstat
# The goal of this test is to connect to any router in the
# network (interior or edge) and ask for details about a specific edge router
# You could not do that before DISPATCH-1580
# Makes a connection to an interior router INT.A and runs qdstat
# asking for all connections of an edge router EA1
outs = self.run_qdstat(['-d', 'EA1', '-c'],
address=self.routers[0].addresses[0])
parts = outs.split("\n")
conn_found = False
for part in parts:
if "INT.A" in part and "edge" in part and "out" in part:
conn_found = True
break
self.assertTrue(conn_found)
# Makes a connection to an edge router and runs qdstat
# asking for all connections of an edge router EA1
outs = self.run_qdstat(['-d', 'EA1', '-c'],
address=self.routers[2].addresses[0])
parts = outs.split("\n")
conn_found = False
for part in parts:
if "INT.A" in part and "edge" in part and "out" in part:
conn_found = True
break
self.assertTrue(conn_found)
# Makes a connection to an interior router INT.B and runs qdstat
# asking for all connections of an edge router EA1. The interior
# router INT.B is connected to edge router EA1 indirectly via
# interior router INT.A
outs = self.run_qdstat(['--edge-router', 'EA1', '-c'],
address=self.routers[1].addresses[0])
parts = outs.split("\n")
conn_found = False
for part in parts:
if "INT.A" in part and "edge" in part and "out" in part:
conn_found = True
break
self.assertTrue(conn_found)
def test_71_qdmanage_edge_router_option(self):
# Makes a connection to an interior router INT.A and runs qdstat
# asking for all connections of an edge router EA1
mgmt = QdManager(self, address=self.routers[0].addresses[0],
edge_router_id='EA1')
conn_found = False
outs = mgmt.query('org.apache.qpid.dispatch.connection')
for out in outs:
if out['container'] == 'INT.A' and out['dir'] == "out" and out['role'] == "edge":
conn_found = True
break
self.assertTrue(conn_found)
# Makes a connection to an edge router and runs qdstat
# asking for all connections of an edge router EA1
mgmt = QdManager(self, address=self.routers[2].addresses[0],
edge_router_id='EA1')
conn_found = False
outs = mgmt.query('org.apache.qpid.dispatch.connection')
for out in outs:
if out['container'] == 'INT.A' and out['dir'] == "out" and out['role'] == "edge":
conn_found = True
break
self.assertTrue(conn_found)
# Makes a connection to an interior router INT.B and runs qdstat
# asking for all connections of an edge router EA1. The interior
# router INT.B is connected to edge router EA1 indirectly via
# interior router INT.A
mgmt = QdManager(self, address=self.routers[1].addresses[0],
edge_router_id='EA1')
conn_found = False
outs = mgmt.query('org.apache.qpid.dispatch.connection')
for out in outs:
if out['container'] == 'INT.A' and out['dir'] == "out" and out['role'] == "edge":
conn_found = True
break
self.assertTrue(conn_found)
def test_72_qdstat_query_interior_from_edge(self):
# Connect to Edge Router EA1 and query the connections on
# Interior Router INT.A
outs = self.run_qdstat(['-r', 'INT.A', '-c'],
address=self.routers[2].addresses[0])
# The Interior Router INT.A is connected to two edge routers
# EA1 and EA2 and is also connected to another interior router INT.B
# We will connect to edge router EA1 (which has an edge
# uplink to INT.A) and query for connections on INT.A
ea1_conn_found = False
ea2_conn_found = False
int_b_inter_router_conn_found = False
parts = outs.split("\n")
for part in parts:
if "INT.B" in part and "inter-router" in part and "in" in part:
int_b_inter_router_conn_found = True
if "EA1" in part and "edge" in part and "in" in part:
ea1_conn_found = True
if "EA2" in part and "edge" in part and "in" in part:
ea2_conn_found = True
self.assertTrue(ea1_conn_found and ea2_conn_found and int_b_inter_router_conn_found)
# The Interior Router INT.B is connected indirectly to edge router
# EA1 via INT.A
# We will connect to edge router EA1 (which has an edge
# uplink to INT.A) and query for connections on INT.B
outs = self.run_qdstat(['-r', 'INT.B', '-c'],
address=self.routers[2].addresses[0])
eb1_conn_found = False
eb2_conn_found = False
int_a_inter_router_conn_found = False
parts = outs.split("\n")
for part in parts:
if "INT.A" in part and "inter-router" in part and "out" in part:
int_a_inter_router_conn_found = True
if "EB1" in part and "edge" in part and "in" in part:
eb1_conn_found = True
if "EB2" in part and "edge" in part and "in" in part:
eb2_conn_found = True
self.assertTrue(eb1_conn_found and eb2_conn_found and int_a_inter_router_conn_found)
def test_73_qdmanage_query_interior_from_edge(self):
# The Interior Router INT.A is connected to two edge routers
# EA1 and EA2 and is also connected to another interior router INT.B
# We will connect to edge router EA1 (which has an edge
# uplink to INT.A) and query for connections on INT.A
mgmt = QdManager(self, address=self.routers[2].addresses[0],
router_id='INT.A')
outs = mgmt.query('org.apache.qpid.dispatch.connection')
ea1_conn_found = False
ea2_conn_found = False
int_b_inter_router_conn_found = False
for out in outs:
if out['container'] == "INT.B" and out['role'] == "inter-router" and out['dir'] == "in":
int_b_inter_router_conn_found = True
if out['container'] == "EA1" and out['role'] == "edge" and out['dir'] == "in":
ea1_conn_found = True
if out['container'] == "EA2" and out['role'] == "edge" and out['dir'] == "in":
ea2_conn_found = True
self.assertTrue(ea1_conn_found and ea2_conn_found and int_b_inter_router_conn_found)
# The Interior Router INT.B is connected indirectly to edge router
# EA1 via INT.A
# We will connect to edge router EA1 (which has an edge
# uplink to INT.A) and query for connections on INT.B
mgmt = QdManager(self, address=self.routers[2].addresses[0],
router_id='INT.B')
outs = mgmt.query('org.apache.qpid.dispatch.connection')
eb1_conn_found = False
eb2_conn_found = False
int_a_inter_router_conn_found = False
for out in outs:
if out['container'] == "INT.A" and out['role'] == "inter-router" and out['dir'] == "out":
int_a_inter_router_conn_found = True
if out['container'] == "EB1" and out['role'] == "edge" and out['dir'] == "in":
eb1_conn_found = True
if out['container'] == "EB2" and out['role'] == "edge" and out['dir'] == "in":
eb2_conn_found = True
self.assertTrue(int_a_inter_router_conn_found and eb1_conn_found and eb2_conn_found)
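# Illustrative sketch (not part of the original test suite): the proxied
# management query that test_71 above performs, written as a standalone helper.
# It assumes the QdManager helper already imported by this module; `address` is
# any router listener and `edge_id` is the container id of the edge router of
# interest (both caller-supplied, hypothetical values).
def query_edge_connections(test_case, address, edge_id):
    # Attach to whichever router `address` points at and ask it to proxy the
    # query to edge router `edge_id`, returning that router's connection records.
    mgmt = QdManager(test_case, address=address, edge_router_id=edge_id)
    return mgmt.query('org.apache.qpid.dispatch.connection')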
class LinkRouteProxyTest(TestCase):
"""
Test edge router's ability to proxy configured and connection-scoped link
routes into the interior
"""
@classmethod
def setUpClass(cls):
"""Start a router"""
super(LinkRouteProxyTest, cls).setUpClass()
def router(name, mode, extra):
config = [
('router', {'mode': mode, 'id': name}),
('listener', {'role': 'normal', 'port': cls.tester.get_port()})
]
if extra:
config.extend(extra)
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
return cls.routers[-1]
# configuration:
# two edge routers connected via 2 interior routers.
#
# +-------+ +---------+ +---------+ +-------+
# | EA1 |<==>| INT.A |<==>| INT.B |<==>| EB1 |
# +-------+ +---------+ +---------+ +-------+
cls.routers = []
interrouter_port = cls.tester.get_port()
cls.INTA_edge_port = cls.tester.get_port()
cls.INTB_edge_port = cls.tester.get_port()
router('INT.A', 'interior',
[('listener', {'role': 'inter-router', 'port': interrouter_port}),
('listener', {'role': 'edge', 'port': cls.INTA_edge_port})])
cls.INT_A = cls.routers[0]
cls.INT_A.listener = cls.INT_A.addresses[0]
router('INT.B', 'interior',
[('connector', {'name': 'connectorToA', 'role': 'inter-router',
'port': interrouter_port}),
('listener', {'role': 'edge', 'port': cls.INTB_edge_port})])
cls.INT_B = cls.routers[1]
cls.INT_B.listener = cls.INT_B.addresses[0]
router('EA1', 'edge',
[('listener', {'name': 'rc', 'role': 'route-container',
'port': cls.tester.get_port()}),
('connector', {'name': 'uplink', 'role': 'edge',
'port': cls.INTA_edge_port}),
('linkRoute', {'prefix': 'CfgLinkRoute1', 'containerId': 'FakeBroker', 'direction': 'in'}),
('linkRoute', {'prefix': 'CfgLinkRoute1', 'containerId': 'FakeBroker', 'direction': 'out'})])
cls.EA1 = cls.routers[2]
cls.EA1.listener = cls.EA1.addresses[0]
cls.EA1.route_container = cls.EA1.addresses[1]
router('EB1', 'edge',
[('connector', {'name': 'uplink', 'role': 'edge',
'port': cls.INTB_edge_port}),
('listener', {'name': 'rc', 'role': 'route-container',
'port': cls.tester.get_port()}),
('linkRoute', {'pattern': '*.cfg.pattern.#', 'containerId': 'FakeBroker', 'direction': 'in'}),
('linkRoute', {'pattern': '*.cfg.pattern.#', 'containerId': 'FakeBroker', 'direction': 'out'})])
cls.EB1 = cls.routers[3]
cls.EB1.listener = cls.EB1.addresses[0]
cls.EB1.route_container = cls.EB1.addresses[1]
cls.INT_A.wait_router_connected('INT.B')
cls.INT_B.wait_router_connected('INT.A')
cls.EA1.wait_connectors()
cls.EB1.wait_connectors()
cls.CFG_LINK_ROUTE_TYPE = 'org.apache.qpid.dispatch.router.config.linkRoute'
cls.CONN_LINK_ROUTE_TYPE = 'org.apache.qpid.dispatch.router.connection.linkRoute'
cls.CONNECTOR_TYPE = 'org.apache.qpid.dispatch.connector'
cls.skip = { 'test_01' : 0,
'test_02' : 0,
'test_03' : 0,
'test_50' : 0,
'test_51' : 0,
'test_52' : 0
}
def _get_address(self, router, address):
"""Lookup address in route table"""
a_type = 'org.apache.qpid.dispatch.router.address'
addrs = router.management.query(a_type).get_dicts()
return list(filter(lambda a: a['name'].find(address) != -1,
addrs))
def _wait_address_gone(self, router, address):
"""Block until address is removed from the route table"""
while self._get_address(router, address):
sleep(0.1)
def _test_traffic(self, sender, receiver, address, count=5):
"""Generate message traffic between two normal clients"""
tr = AsyncTestReceiver(receiver, address)
ts = AsyncTestSender(sender, address, count)
ts.wait() # wait until all sent
for i in range(count):
tr.queue.get(timeout=TIMEOUT)
tr.stop()
    def test_01_immediate_detach_reattach(self):
        """
        Have a service for a link routed address abruptly detach
        in response to an incoming link attach.
        The attaching client from EB1 will get an attach response then an
        immediate detach. The client will immediately re-establish the link.
        """
        if self.skip [ 'test_01' ] :
            self.skipTest ( "Test skipped during development." )
class AttachDropper(FakeService):
def __init__(self, *args, **kwargs):
super(AttachDropper, self).__init__(*args, **kwargs)
self.link_dropped = Event()
def on_link_remote_open(self, event):
# drop it
event.link.close()
event.connection.close()
self.link_dropped.set()
ad = AttachDropper(self.EA1.route_container)
self.INT_B.wait_address("CfgLinkRoute1")
# create a consumer, do not wait for link to open, reattach
# on received detach
rx = AsyncTestReceiver(self.EB1.listener, 'CfgLinkRoute1/foo',
wait=False, recover_link=True)
ad.link_dropped.wait(timeout=TIMEOUT)
ad.join() # wait for thread exit
# wait until prefix addresses are removed
self._wait_address_gone(self.INT_B, "CCfgLinkRoute1")
self._wait_address_gone(self.INT_B, "DCfgLinkRoute1")
rx.stop()
# now attach a working service to the same address,
# make sure it all works
fs = FakeService(self.EA1.route_container)
self.INT_B.wait_address("CfgLinkRoute1")
rx = AsyncTestReceiver(self.EB1.listener, 'CfgLinkRoute1/foo',
wait=False, recover_link=True)
tx = AsyncTestSender(self.EA1.listener, 'CfgLinkRoute1/foo',
message=Message(body="HEY HO LET'S GO!"))
tx.wait()
msg = rx.queue.get(timeout=TIMEOUT)
self.assertTrue(msg.body == "HEY HO LET'S GO!")
rx.stop()
fs.join()
self.assertEqual(1, fs.in_count)
self.assertEqual(1, fs.out_count)
# wait until addresses are cleaned up
self._wait_address_gone(self.INT_A, "CfgLinkRoute1")
self._wait_address_gone(self.INT_B, "CfgLinkRoute1")
def test_02_thrashing_link_routes(self):
"""
Rapidly add and delete link routes at the edge
"""
if self.skip [ 'test_02' ] :
self.skipTest ( "Test skipped during development." )
# activate the pre-configured link routes
ea1_mgmt = self.EA1.management
fs = FakeService(self.EA1.route_container)
self.INT_B.wait_address("CfgLinkRoute1")
for i in range(10):
lr1 = ea1_mgmt.create(type=self.CFG_LINK_ROUTE_TYPE,
name="TestLRout%d" % i,
attributes={'pattern': 'Test/*/%d/#' % i,
'containerId': 'FakeBroker',
'direction': 'out'})
lr2 = ea1_mgmt.create(type=self.CFG_LINK_ROUTE_TYPE,
name="TestLRin%d" % i,
attributes={'pattern': 'Test/*/%d/#' % i,
'containerId': 'FakeBroker',
'direction': 'in'})
# verify that they are correctly propagated (once)
if i == 9:
self.INT_B.wait_address("Test/*/9/#")
lr1.delete()
lr2.delete()
fs.join()
self._wait_address_gone(self.INT_B, "CfgLinkRoute1")
def _validate_topology(self, router, expected_links, address):
"""
query existing links and verify they are set up as expected
"""
mgmt = QdManager(self, address=router)
# fetch all the connections
cl = mgmt.query('org.apache.qpid.dispatch.connection')
# map them by their identity
conns = dict([(c['identity'], c) for c in cl])
# now fetch all links for the address
ll = mgmt.query('org.apache.qpid.dispatch.router.link')
test_links = [l for l in ll if
l.get('owningAddr', '').find(address) != -1]
self.assertEqual(len(expected_links), len(test_links))
for elink in expected_links:
matches = filter(lambda l: (l['linkDir'] == elink[0]
and
conns[l['connectionId']]['container'] == elink[1]
and
conns[l['connectionId']]['role'] == elink[2]),
test_links)
self.assertTrue(len(list(matches)) == 1)
def test_03_interior_conn_lost(self):
"""
What happens when the interior connection bounces?
"""
if self.skip [ 'test_03' ] :
self.skipTest ( "Test skipped during development." )
config = Qdrouterd.Config([('router', {'mode': 'edge',
'id': 'Edge1'}),
('listener', {'role': 'normal',
'port': self.tester.get_port()}),
('listener', {'name': 'rc',
'role': 'route-container',
'port': self.tester.get_port()}),
('linkRoute', {'pattern': 'Edge1/*',
'containerId': 'FakeBroker',
'direction': 'in'}),
('linkRoute', {'pattern': 'Edge1/*',
'containerId': 'FakeBroker',
'direction': 'out'})])
er = self.tester.qdrouterd('Edge1', config, wait=True)
# activate the link routes before the connection exists
fs = FakeService(er.addresses[1])
er.wait_address("Edge1/*")
# create the connection to interior
er_mgmt = er.management
ctor = er_mgmt.create(type=self.CONNECTOR_TYPE,
name='toA',
attributes={'role': 'edge',
'port': self.INTA_edge_port})
self.INT_B.wait_address("Edge1/*")
# delete it, and verify the routes are removed
ctor.delete()
self._wait_address_gone(self.INT_B, "Edge1/*")
# now recreate and verify routes re-appear
ctor = er_mgmt.create(type=self.CONNECTOR_TYPE,
name='toA',
attributes={'role': 'edge',
'port': self.INTA_edge_port})
self.INT_B.wait_address("Edge1/*")
self._test_traffic(self.INT_B.listener,
self.INT_B.listener,
"Edge1/One",
count=5)
fs.join()
self.assertEqual(5, fs.in_count)
self.assertEqual(5, fs.out_count)
er.teardown()
self._wait_address_gone(self.INT_B, "Edge1/*")
def test_50_link_topology(self):
"""
Verify that the link topology that results from activating a link route
and sending traffic is correct
"""
if self.skip [ 'test_50' ] :
self.skipTest ( "Test skipped during development." )
fs = FakeService(self.EA1.route_container)
self.INT_B.wait_address("CfgLinkRoute1")
# create a sender on one edge and the receiver on another
bc_b = BlockingConnection(self.EB1.listener, timeout=TIMEOUT)
erx = bc_b.create_receiver(address="CfgLinkRoute1/buhbye", credit=10)
bc_a = BlockingConnection(self.EA1.listener, timeout=TIMEOUT)
etx = bc_a.create_sender(address="CfgLinkRoute1/buhbye")
etx.send(Message(body="HI THERE"), timeout=TIMEOUT)
self.assertEqual("HI THERE", erx.receive(timeout=TIMEOUT).body)
erx.accept()
# expect the following links have been established for the
# "CfgLinkRoute1/buhbye" address:
# EA1
# 1 out link to INT.A (connection role: edge)
# 1 in link from bc_a (normal)
# 1 in link from FakeBroker (route-container)
# 1 out link to FakeBroker (route-container)
# INT.A
# 1 in link from EA1 (edge)
# 1 out link to INT.B (inter-router)
# INT.B
# 1 out link to EB1 (edge)
# 1 in link from INT.A (inter-router)
# EB1
# 1 out link to bc_b (normal)
# 1 in link from INT.B (edge)
expect = {
self.EA1.listener: [
('in', bc_a.container.container_id, 'normal'),
('in', 'FakeBroker', 'route-container'),
('out', 'FakeBroker', 'route-container'),
('out', 'INT.A', 'edge')],
self.INT_A.listener: [
('in', 'EA1', 'edge'),
('out', 'INT.B', 'inter-router')],
self.INT_B.listener: [
('in', 'INT.A', 'inter-router'),
('out', 'EB1', 'edge')],
self.EB1.listener: [
('in', 'INT.B', 'edge'),
('out', bc_b.container.container_id, 'normal')]
}
for router, expected_links in expect.items():
self._validate_topology(router, expected_links,
'CfgLinkRoute1/buhbye')
fs.join()
self.assertEqual(1, fs.in_count)
self.assertEqual(1, fs.out_count)
def test_51_link_route_proxy_configured(self):
"""
        Activate the configured link routes via a FakeService, verify proxies
        are created by passing traffic from/to an interior router
"""
if self.skip [ 'test_51' ] :
self.skipTest ( "Test skipped during development." )
a_type = 'org.apache.qpid.dispatch.router.address'
fs = FakeService(self.EA1.route_container)
self.INT_B.wait_address("CfgLinkRoute1")
self._test_traffic(self.INT_B.listener,
self.INT_B.listener,
"CfgLinkRoute1/hi",
count=5)
fs.join()
self.assertEqual(5, fs.in_count)
self.assertEqual(5, fs.out_count)
# now that FakeService is gone, the link route should no longer be
# active:
self._wait_address_gone(self.INT_A, "CfgLinkRoute1")
# repeat test, but this time with patterns:
fs = FakeService(self.EB1.route_container)
self.INT_A.wait_address("*.cfg.pattern.#")
self._test_traffic(self.INT_A.listener,
self.INT_A.listener,
"MATCH.cfg.pattern",
count=5)
fs.join()
self.assertEqual(5, fs.in_count)
self.assertEqual(5, fs.out_count)
self._wait_address_gone(self.INT_A, "*.cfg.pattern.#")
def test_52_conn_link_route_proxy(self):
"""
Test connection scoped link routes by connecting a fake service to the
Edge via the route-container connection. Have the fake service
        configure some link routes. Then have clients on the interior
exchange messages via the fake service.
"""
if self.skip [ 'test_52' ] :
self.skipTest ( "Test skipped during development." )
fs = ConnLinkRouteService(self.EA1.route_container,
container_id="FakeService",
config = [("ConnLinkRoute1",
{"pattern": "Conn/*/One",
"direction": "out"}),
("ConnLinkRoute2",
{"pattern": "Conn/*/One",
"direction": "in"})])
self.assertEqual(2, len(fs.values))
self.INT_B.wait_address("Conn/*/One")
self.assertEqual(2, len(self._get_address(self.INT_A, "Conn/*/One")))
# between interiors
self._test_traffic(self.INT_B.listener,
self.INT_A.listener,
"Conn/BLAB/One",
count=5)
# edge to edge
self._test_traffic(self.EB1.listener,
self.EA1.listener,
"Conn/BLECH/One",
count=5)
fs.join()
self.assertEqual(10, fs.in_count)
self.assertEqual(10, fs.out_count)
self._wait_address_gone(self.INT_A, "Conn/*/One")
class Timeout(object):
def __init__(self, parent):
self.parent = parent
def on_timer_task(self, event):
self.parent.timeout()
class PollTimeout(object):
def __init__(self, parent):
self.parent = parent
def on_timer_task(self, event):
self.parent.poll_timeout()
class ConnectivityTest(MessagingHandler):
def __init__(self, interior_host, edge_host, edge_id):
super(ConnectivityTest, self).__init__()
self.interior_host = interior_host
self.edge_host = edge_host
self.edge_id = edge_id
self.interior_conn = None
self.edge_conn = None
self.error = None
self.proxy = None
self.query_sent = False
def timeout(self):
self.error = "Timeout Expired"
self.interior_conn.close()
self.edge_conn.close()
def on_start(self, event):
self.timer = event.reactor.schedule(10.0, Timeout(self))
self.interior_conn = event.container.connect(self.interior_host)
self.edge_conn = event.container.connect(self.edge_host)
self.reply_receiver = event.container.create_receiver(self.interior_conn, dynamic=True)
def on_link_opened(self, event):
if event.receiver == self.reply_receiver:
self.proxy = MgmtMsgProxy(self.reply_receiver.remote_source.address)
self.agent_sender = event.container.create_sender(self.interior_conn, "$management")
def on_sendable(self, event):
if not self.query_sent:
self.query_sent = True
self.agent_sender.send(self.proxy.query_connections())
def on_message(self, event):
if event.receiver == self.reply_receiver:
response = self.proxy.response(event.message)
if response.status_code != 200:
self.error = "Unexpected error code from agent: %d - %s" % (response.status_code, response.status_description)
connections = response.results
count = 0
for conn in connections:
if conn.role == 'edge' and conn.container == self.edge_id:
count += 1
if count != 1:
self.error = "Incorrect edge count for container-id. Expected 1, got %d" % count
self.interior_conn.close()
self.edge_conn.close()
self.timer.cancel()
def run(self):
Container(self).run()
class DynamicAddressTest(MessagingHandler):
def __init__(self, receiver_host, sender_host):
super(DynamicAddressTest, self).__init__()
self.receiver_host = receiver_host
self.sender_host = sender_host
self.receiver_conn = None
self.sender_conn = None
self.receiver = None
self.address = None
self.count = 300
self.n_rcvd = 0
self.n_sent = 0
self.error = None
def timeout(self):
self.error = "Timeout Expired - n_sent=%d n_rcvd=%d addr=%s" % (self.n_sent, self.n_rcvd, self.address)
self.receiver_conn.close()
self.sender_conn.close()
def on_start(self, event):
self.timer = event.reactor.schedule(5.0, Timeout(self))
self.receiver_conn = event.container.connect(self.receiver_host)
self.sender_conn = event.container.connect(self.sender_host)
self.receiver = event.container.create_receiver(self.receiver_conn, dynamic=True)
def on_link_opened(self, event):
if event.receiver == self.receiver:
self.address = self.receiver.remote_source.address
self.sender = event.container.create_sender(self.sender_conn, self.address)
def on_sendable(self, event):
while self.n_sent < self.count:
self.sender.send(Message(body="Message %d" % self.n_sent))
self.n_sent += 1
def on_message(self, event):
self.n_rcvd += 1
if self.n_rcvd == self.count:
self.receiver_conn.close()
self.sender_conn.close()
self.timer.cancel()
def run(self):
Container(self).run()
class CustomTimeout(object):
def __init__(self, parent):
self.parent = parent
def on_timer_task(self, event):
message = Message(body="Test Message")
message.address = self.parent.address
self.parent.sender.send(message)
self.parent.cancel_custom()
class MobileAddressAnonymousTest(MessagingHandler):
"""
Attach a receiver to the interior and an anonymous sender to the edge router
In a non-anonymous sender scenario, the sender will never be given credit
to send until a receiver on the same address shows up . Since this
is an anonymous sender, credit is given instatnly and the sender starts
sending immediately.
This test will first send 3 messages with a one second interval to make
sure receiver is available. Then it will fire off 300 messages
After dispositions are received for the 300 messages, it will close the
receiver and send 50 more messages. These 50 messages should be released
or modified.
"""
def __init__(self, receiver_host, sender_host, address, large_msg=False):
super(MobileAddressAnonymousTest, self).__init__()
self.receiver_host = receiver_host
self.sender_host = sender_host
self.receiver_conn = None
self.sender_conn = None
self.receiver = None
self.sender = None
self.error = None
self.n_sent = 0
self.n_rcvd = 0
self.address = address
self.ready = False
self.custom_timer = None
self.num_msgs = 300
self.extra_msgs = 50
self.n_accepted = 0
self.n_modified = 0
self.n_released = 0
self.error = None
self.max_attempts = 3
self.num_attempts = 0
self.test_started = False
self.large_msg = large_msg
if self.large_msg:
self.body = "0123456789101112131415" * 10000
self.properties = {'big field': 'X' * 32000}
def on_start(self, event):
self.timer = event.reactor.schedule(15.0 if self.large_msg else 5.0, Timeout(self))
self.receiver_conn = event.container.connect(self.receiver_host)
self.sender_conn = event.container.connect(self.sender_host)
self.receiver = event.container.create_receiver(self.receiver_conn, self.address)
# This is an anonymous sender.
self.sender = event.container.create_sender(self.sender_conn)
def cancel_custom(self):
self.custom_timer.cancel()
def timeout(self):
if self.ready:
self.error = "Timeout Expired - n_sent=%d n_accepted=%d n_modified=%d n_released=%d" % (
self.n_sent, self.n_accepted, self.n_modified, self.n_released)
else:
self.error = "Did not get a settlement from the receiver. The test cannot be started until " \
"a settlement to a test message is received"
self.receiver_conn.close()
self.sender_conn.close()
def on_sendable(self, event):
if not self.test_started:
message = Message(body="Test Message")
message.address = self.address
self.sender.send(message)
self.num_attempts += 1
self.test_started = True
def on_message(self, event):
if event.receiver == self.receiver:
if self.ready:
self.n_rcvd += 1
def on_link_closed(self, event):
# The receiver has closed. We will send messages again and
# make sure they are released.
if event.receiver == self.receiver:
for i in range(self.extra_msgs):
if self.large_msg:
message = Message(body=self.body, properties=self.properties)
else:
message = Message(body="Message %d" % self.n_sent)
message.address = self.address
self.sender.send(message)
self.n_sent += 1
def on_settled(self, event):
rdisp = str(event.delivery.remote_state)
if rdisp == "RELEASED" and not self.ready:
if self.num_attempts < self.max_attempts:
self.custom_timer = event.reactor.schedule(1, CustomTimeout(self))
self.num_attempts += 1
elif rdisp == "ACCEPTED" and not self.ready:
self.ready = True
for i in range(self.num_msgs):
if self.large_msg:
message = Message(body=self.body, properties=self.properties)
else:
message = Message(body="Message %d" % self.n_sent)
message.address = self.address
self.sender.send(message)
self.n_sent += 1
elif rdisp == "ACCEPTED" and self.ready:
self.n_accepted += 1
if self.n_accepted == self.num_msgs:
# Close the receiver after sending 300 messages
self.receiver.close()
elif rdisp == "RELEASED" and self.ready:
self.n_released += 1
elif rdisp == "MODIFIED" and self.ready:
self.n_modified += 1
if self.num_msgs == self.n_accepted and self.extra_msgs == self.n_released + self.n_modified:
self.receiver_conn.close()
self.sender_conn.close()
self.timer.cancel()
def run(self):
Container(self).run()
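# Illustrative sketch (not part of the original test suite): the anonymous-
# versus addressed-sender distinction that MobileAddressAnonymousTest relies
# on. An addressed sender fixes its target at attach time and only gets credit
# once a consumer exists; an anonymous sender has no target, receives credit
# immediately, and the router routes on each message's `address` field.
# `host` and `addr` are hypothetical caller-supplied values.
class AnonVsAddressedSenderSketch(MessagingHandler):
    def __init__(self, host, addr):
        super(AnonVsAddressedSenderSketch, self).__init__()
        self.host = host
        self.addr = addr
        self.addressed = None
        self.anonymous = None
    def on_start(self, event):
        conn = event.container.connect(self.host)
        # Addressed sender: the target address is part of the link attach.
        self.addressed = event.container.create_sender(conn, self.addr)
        # Anonymous sender: no target on the link.
        self.anonymous = event.container.create_sender(conn)
    def on_sendable(self, event):
        if event.sender == self.anonymous:
            msg = Message(body="hello")
            msg.address = self.addr  # per-message routing for the anonymous link
            self.anonymous.send(msg)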
class MobileAddressTest(MessagingHandler):
"""
From a single container create a sender and a receiver connection.
Send a batch of normal messages that should be accepted by the receiver.
Close the receiver but not the receiver connection and then
send an extra batch of messages that should be released or modified.
Success is when message disposition counts add up correctly.
"""
def __init__(self, receiver_host, sender_host, address):
super(MobileAddressTest, self).__init__()
self.receiver_host = receiver_host
self.sender_host = sender_host
self.address = address
self.receiver_conn = None
self.sender_conn = None
self.receiver = None
self.sender = None
self.logger = Logger()
self.normal_count = 300
self.extra_count = 50
self.n_rcvd = 0
self.n_sent = 0
self.n_accepted = 0
self.n_rel_or_mod = 0
self.error = None
self.warning = False
def fail_exit(self, title):
self.error = title
self.logger.log("MobileAddressTest result:ERROR: %s" % title)
self.logger.log("address %s " % self.address)
self.logger.log("n_sent = %d. Expected total:%d normal=%d, extra=%d" % \
(self.n_sent, (self.normal_count + self.extra_count), self.normal_count, self.extra_count))
self.logger.log("n_rcvd = %d. Expected %d" % (self.n_rcvd, self.normal_count))
self.logger.log("n_accepted = %d. Expected %d" % (self.n_accepted, self.normal_count))
self.logger.log("n_rel_or_mod = %d. Expected %d" % (self.n_rel_or_mod, self.extra_count))
self.timer.cancel()
self.receiver_conn.close()
self.sender_conn.close()
def on_timer_task(self, event):
self.fail_exit("Timeout Expired")
def on_start(self, event):
self.logger.log("on_start address=%s" % self.address)
self.timer = event.reactor.schedule(5.0, self)
self.receiver_conn = event.container.connect(self.receiver_host)
self.sender_conn = event.container.connect(self.sender_host)
self.receiver = event.container.create_receiver(self.receiver_conn, self.address)
self.sender = event.container.create_sender(self.sender_conn, self.address)
def on_sendable(self, event):
self.logger.log("on_sendable")
if event.sender == self.sender:
self.logger.log("on_sendable sender")
while self.n_sent < self.normal_count:
# send the normal messages
message = Message(body="Message %d" % self.n_sent)
self.sender.send(message)
self.logger.log("on_sendable sender: send message %d: %s" % (self.n_sent, message))
self.n_sent += 1
elif event.receiver == self.receiver:
self.logger.log("on_sendable receiver: WARNING unexpected callback for receiver")
self.warning = True
else:
self.fail_exit("on_sendable not for sender nor for receiver")
def on_message(self, event):
self.logger.log("on_message")
if event.receiver == self.receiver:
self.n_rcvd += 1
self.logger.log("on_message receiver: receiver message %d" % (self.n_rcvd))
else:
self.logger.log("on_message: WARNING callback not for test receiver.")
def on_settled(self, event):
# Expect all settlement events at sender as remote state
self.logger.log("on_settled")
rdisp = str(event.delivery.remote_state)
ldisp = str(event.delivery.local_state)
if event.sender == self.sender:
if rdisp is None:
self.logger.log("on_settled: WARNING: sender remote delivery state is None. Local state = %s." % ldisp)
elif rdisp == "ACCEPTED":
self.n_accepted += 1
self.logger.log("on_settled sender: ACCEPTED %d (of %d)" %
(self.n_accepted, self.normal_count))
elif rdisp == "RELEASED" or rdisp == "MODIFIED":
self.n_rel_or_mod += 1
self.logger.log("on_settled sender: %s %d (of %d)" %
(rdisp, self.n_rel_or_mod, self.extra_count))
else:
self.logger.log("on_settled sender: WARNING unexpected settlement: %s, n_accepted: %d, n_rel_or_mod: %d" %
(rdisp, self.n_accepted, self.n_rel_or_mod))
self.warning = True
if self.n_sent == self.normal_count and self.n_accepted == self.normal_count:
# All normal messages are accounted.
# Close receiver and launch extra messages into the router network.
self.logger.log("on_settled sender: normal messages all accounted. receiver.close() then send extra messages")
self.receiver.close()
for i in range(self.extra_count):
message = Message(body="Message %d" % self.n_sent)
self.sender.send(message)
# Messages must be blasted to get them into the network before news
# of the receiver closure is propagated back to EA1.
# self.logger.log("on_settled sender: send extra message %d: %s" % (self.n_sent, message))
self.n_sent += 1
if self.n_accepted > self.normal_count:
self.fail_exit("Too many messages were accepted")
if self.n_rel_or_mod > self.extra_count:
self.fail_exit("Too many messages were released or modified")
if self.n_rel_or_mod == self.extra_count:
# All extra messages are accounted. Exit with success.
result = "SUCCESS" if not self.warning else "WARNING"
self.logger.log("MobileAddressTest result:%s" % result)
self.timer.cancel()
self.receiver_conn.close()
self.sender_conn.close()
elif event.receiver == self.receiver:
self.logger.log("on_settled receiver: WARNING unexpected on_settled. remote: %s, local: %s" % (rdisp, ldisp))
self.warning = True
def run(self):
Container(self).run()
class MobileAddressOneSenderTwoReceiversTest(MessagingHandler):
def __init__(self, receiver1_host, receiver2_host, sender_host, address):
super(MobileAddressOneSenderTwoReceiversTest, self).__init__()
self.receiver1_host = receiver1_host
self.receiver2_host = receiver2_host
self.sender_host = sender_host
self.address = address
# One sender connection and two receiver connections
self.receiver1_conn = None
self.receiver2_conn = None
self.sender_conn = None
self.receiver1 = None
self.receiver2 = None
self.sender = None
self.count = 300
self.rel_count = 50
self.n_rcvd1 = 0
self.n_rcvd2 = 0
self.n_sent = 0
self.n_settled = 0
self.n_released = 0
self.error = None
self.timer = None
self.all_msgs_received = False
self.recvd_msg_bodies = dict()
self.dup_msg = None
def timeout(self):
if self.dup_msg:
self.error = "Duplicate message %s received " % self.dup_msg
else:
self.error = "Timeout Expired - n_sent=%d n_rcvd=%d n_settled=%d n_released=%d addr=%s" % \
(self.n_sent, (self.n_rcvd1 + self.n_rcvd2), self.n_settled, self.n_released, self.address)
self.receiver1_conn.close()
self.receiver2_conn.close()
self.sender_conn.close()
def on_start(self, event):
self.timer = event.reactor.schedule(5.0, Timeout(self))
# Create two receivers
self.receiver1_conn = event.container.connect(self.receiver1_host)
self.receiver2_conn = event.container.connect(self.receiver2_host)
self.receiver1 = event.container.create_receiver(self.receiver1_conn,
self.address)
self.receiver2 = event.container.create_receiver(self.receiver2_conn,
self.address)
# Create one sender
self.sender_conn = event.container.connect(self.sender_host)
self.sender = event.container.create_sender(self.sender_conn,
self.address)
def on_sendable(self, event):
while self.n_sent < self.count:
self.sender.send(Message(body="Message %d" % self.n_sent))
self.n_sent += 1
def on_message(self, event):
if self.recvd_msg_bodies.get(event.message.body):
self.dup_msg = event.message.body
self.timeout()
else:
self.recvd_msg_bodies[event.message.body] = event.message.body
if event.receiver == self.receiver1:
self.n_rcvd1 += 1
if event.receiver == self.receiver2:
self.n_rcvd2 += 1
if self.n_sent == self.n_rcvd1 + self.n_rcvd2:
self.all_msgs_received = True
def on_settled(self, event):
self.n_settled += 1
if self.n_settled == self.count:
self.receiver1.close()
self.receiver2.close()
for i in range(self.rel_count):
self.sender.send(Message(body="Message %d" % self.n_sent))
self.n_sent += 1
def on_released(self, event):
self.n_released += 1
if self.n_released == self.rel_count and self.all_msgs_received:
self.receiver1_conn.close()
self.receiver2_conn.close()
self.sender_conn.close()
self.timer.cancel()
def run(self):
Container(self).run()
class MobileAddressMulticastTest(MessagingHandler):
def __init__(self, receiver1_host, receiver2_host, receiver3_host,
sender_host, address, check_addr_host=None, large_msg=False,
anon_sender=False):
super(MobileAddressMulticastTest, self).__init__()
self.receiver1_host = receiver1_host
self.receiver2_host = receiver2_host
self.receiver3_host = receiver3_host
self.sender_host = sender_host
self.address = address
self.anon_sender = anon_sender
# One sender connection and two receiver connections
self.receiver1_conn = None
self.receiver2_conn = None
self.receiver3_conn = None
self.sender_conn = None
self.receiver1 = None
self.receiver2 = None
self.receiver3 = None
self.sender = None
self.count = 200
self.n_rcvd1 = 0
self.n_rcvd2 = 0
self.n_rcvd3 = 0
self.n_sent = 0
self.n_settled = 0
self.n_released = 0
self.error = None
self.timer = None
self.all_msgs_received = False
self.recvd1_msgs = dict()
self.recvd2_msgs = dict()
self.recvd3_msgs = dict()
self.dup_msg_rcvd = False
self.dup_msg = None
self.receiver_name = None
self.large_msg = large_msg
self.body = ""
self.r_attaches = 0
self.reactor = None
self.addr_timer = None
# The maximum number of times we are going to try to check if the
# address has propagated.
self.max_attempts = 5
        self.num_attempts = 0
self.container = None
self.check_addr_host = check_addr_host
if not self.check_addr_host:
self.check_addr_host = self.sender_host
if self.large_msg:
self.body = "0123456789101112131415" * 10000
self.properties = {'big field': 'X' * 32000}
def timeout(self):
if self.dup_msg:
self.error = "%s received duplicate message %s" % \
(self.receiver_name, self.dup_msg)
else:
if not self.error:
self.error = "Timeout Expired - n_sent=%d n_rcvd1=%d " \
"n_rcvd2=%d n_rcvd3=%d addr=%s" % \
(self.n_sent, self.n_rcvd1, self.n_rcvd2,
self.n_rcvd3, self.address)
self.receiver1_conn.close()
self.receiver2_conn.close()
self.receiver3_conn.close()
if self.sender_conn:
self.sender_conn.close()
def create_sndr(self):
self.sender_conn = self.container.connect(self.sender_host)
if self.anon_sender:
self.sender = self.container.create_sender(self.sender_conn)
else:
self.sender = self.container.create_sender(self.sender_conn,
self.address)
def check_address(self):
local_node = Node.connect(self.check_addr_host, timeout=TIMEOUT)
outs = local_node.query(type='org.apache.qpid.dispatch.router.address')
found = False
self.num_attempts += 1
for result in outs.results:
if self.address in result[0]:
found = True
self.create_sndr()
local_node.close()
self.addr_timer.cancel()
break
if not found:
if self.num_attempts < self.max_attempts:
self.addr_timer = self.reactor.schedule(1.0, AddrTimer(self))
else:
self.error = "Unable to create sender because of " \
"absence of address in the address table"
self.timeout()
local_node.close()
def on_start(self, event):
self.timer = event.reactor.schedule(20.0 if self.large_msg else 10.0,
Timeout(self))
        # Create three receivers
self.receiver1_conn = event.container.connect(self.receiver1_host)
self.receiver2_conn = event.container.connect(self.receiver2_host)
self.receiver3_conn = event.container.connect(self.receiver3_host)
self.receiver1 = event.container.create_receiver(self.receiver1_conn,
self.address)
self.receiver2 = event.container.create_receiver(self.receiver2_conn,
self.address)
self.receiver3 = event.container.create_receiver(self.receiver3_conn,
self.address)
self.container = event.container
def on_link_opened(self, event):
if event.receiver == self.receiver1 or \
event.receiver == self.receiver2 or \
event.receiver == self.receiver3:
self.r_attaches += 1
if self.r_attaches == 3:
self.reactor = event.reactor
self.addr_timer = self.reactor.schedule(1.0, AddrTimer(self))
def on_sendable(self, event):
while self.n_sent < self.count:
msg = None
if self.large_msg:
msg = Message(body=self.body, properties=self.properties)
else:
msg = Message(body="Message %d" % self.n_sent)
if self.anon_sender:
msg.address = self.address
msg.correlation_id = self.n_sent
self.sender.send(msg)
self.n_sent += 1
def on_message(self, event):
if event.receiver == self.receiver1:
if self.recvd1_msgs.get(event.message.correlation_id):
self.dup_msg = event.message.correlation_id
self.receiver_name = "Receiver 1"
self.timeout()
self.n_rcvd1 += 1
self.recvd1_msgs[event.message.correlation_id] = event.message.correlation_id
if event.receiver == self.receiver2:
if self.recvd2_msgs.get(event.message.correlation_id):
self.dup_msg = event.message.correlation_id
self.receiver_name = "Receiver 2"
self.timeout()
self.n_rcvd2 += 1
self.recvd2_msgs[event.message.correlation_id] = event.message.correlation_id
if event.receiver == self.receiver3:
if self.recvd3_msgs.get(event.message.correlation_id):
self.dup_msg = event.message.correlation_id
self.receiver_name = "Receiver 3"
self.timeout()
self.n_rcvd3 += 1
self.recvd3_msgs[event.message.correlation_id] = event.message.correlation_id
if self.n_rcvd1 == self.count and self.n_rcvd2 == self.count and \
self.n_rcvd3 == self.count:
self.timer.cancel()
self.receiver1_conn.close()
self.receiver2_conn.close()
self.receiver3_conn.close()
self.sender_conn.close()
def run(self):
Container(self).run()
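# Illustrative sketch (not part of the original test suite): the
# address-propagation polling pattern used by check_address() above, written as
# a simple blocking helper. It assumes the Node helper, TIMEOUT, and sleep
# already imported by this module; `router_address` and `wanted` are
# caller-supplied values.
def wait_for_mobile_address(router_address, wanted, attempts=5, delay=1.0):
    # Poll the router's address table until `wanted` appears or attempts run out.
    for _ in range(attempts):
        local_node = Node.connect(router_address, timeout=TIMEOUT)
        try:
            outs = local_node.query(type='org.apache.qpid.dispatch.router.address')
            if any(wanted in result[0] for result in outs.results):
                return True
        finally:
            local_node.close()
        sleep(delay)
    return False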
class MobileAddrMcastDroppedRxTest(MobileAddressMulticastTest):
# failure scenario - cause some receiving clients to close while a large
# message is in transit
def __init__(self, receiver1_host, receiver2_host, receiver3_host,
sender_host, address, check_addr_host=None, large_msg=True):
super(MobileAddrMcastDroppedRxTest, self).__init__(receiver1_host,
receiver2_host,
receiver3_host,
sender_host,
address,
check_addr_host=check_addr_host,
large_msg=large_msg)
self.n_accepted = 0
self.n_released = 0
self.recv1_closed = False
self.recv2_closed = False
def _check_done(self):
if self.n_accepted + self.n_released == self.count:
self.receiver3_conn.close()
self.sender_conn.close()
self.timer.cancel()
def on_message(self, event):
super(MobileAddrMcastDroppedRxTest, self).on_message(event)
# start closing receivers
if self.n_rcvd1 == 50:
if not self.recv1_closed:
self.receiver1_conn.close()
self.recv1_closed = True
if self.n_rcvd2 == 75:
if not self.recv2_closed:
self.recv2_closed = True
self.receiver2_conn.close()
def on_accepted(self, event):
self.n_accepted += 1
self._check_done()
def on_released(self, event):
self.n_released += 1
self._check_done()
class MobileAddrMcastAnonSenderDroppedRxTest(MobileAddressMulticastTest):
# failure scenario - cause some receiving clients to close while a large
# message is in transit
def __init__(self, receiver1_host, receiver2_host, receiver3_host,
sender_host, address, check_addr_host=None, large_msg=True, anon_sender=True):
super(MobileAddrMcastAnonSenderDroppedRxTest, self).__init__(receiver1_host,
receiver2_host,
receiver3_host,
sender_host,
address,
check_addr_host=check_addr_host,
large_msg=large_msg,
anon_sender=anon_sender)
self.n_accepted = 0
self.n_released = 0
self.recv1_closed = False
self.recv2_closed = False
def _check_done(self):
if self.n_accepted + self.n_released == self.count:
self.receiver3_conn.close()
self.sender_conn.close()
self.timer.cancel()
def on_message(self, event):
super(MobileAddrMcastAnonSenderDroppedRxTest, self).on_message(event)
# start closing receivers
if self.n_rcvd1 == 50:
if not self.recv1_closed:
self.receiver1_conn.close()
self.recv1_closed = True
if self.n_rcvd2 == 75:
if not self.recv2_closed:
self.recv2_closed = True
self.receiver2_conn.close()
def on_accepted(self, event):
self.n_accepted += 1
self._check_done()
def on_released(self, event):
self.n_released += 1
self._check_done()
class MobileAddressEventTest(MessagingHandler):
def __init__(self, receiver1_host, receiver2_host, receiver3_host,
sender_host, interior_host, address):
super(MobileAddressEventTest, self).__init__(auto_accept=False)
self.receiver1_host = receiver1_host
self.receiver2_host = receiver2_host
self.receiver3_host = receiver3_host
self.sender_host = sender_host
self.address = address
self.receiver1_conn = None
self.receiver2_conn = None
self.receiver3_conn = None
self.sender_conn = None
self.recvd1_msgs = dict()
self.recvd2_msgs = dict()
self.recvd3_msgs = dict()
self.n_rcvd1 = 0
self.n_rcvd2 = 0
self.n_rcvd3 = 0
self.timer = None
self.receiver1 = None
self.receiver2 = None
self.receiver3 = None
self.sender = None
self.interior_host = interior_host
self.container = None
self.count = 600
self.dup_msg = None
self.receiver_name = None
self.n_sent = 0
self.error = None
self.r_attaches = 0
self.n_released = 0
self.n_settled = 0
self.addr_timer = None
def timeout(self):
if self.dup_msg:
self.error = "%s received duplicate message %s" % \
(self.receiver_name, self.dup_msg)
else:
if not self.error:
self.error = "Timeout Expired - n_sent=%d n_rcvd1=%d " \
"n_rcvd2=%d n_rcvd3=%d addr=%s" % \
(self.n_sent, self.n_rcvd1, self.n_rcvd2,
self.n_rcvd3, self.address)
self.receiver1_conn.close()
self.receiver2_conn.close()
self.receiver3_conn.close()
if self.sender_conn:
self.sender_conn.close()
def check_address(self):
local_node = Node.connect(self.interior_host, timeout=TIMEOUT)
outs = local_node.query(type='org.apache.qpid.dispatch.router.address')
remote_count = outs.attribute_names.index("remoteCount")
found = False
for result in outs.results:
if self.address in result[0]:
found = True
self.sender_conn = self.container.connect(self.sender_host)
self.sender = self.container.create_sender(self.sender_conn,
self.address)
break
if not found:
self.error = "Unable to create sender because of " \
"absence of address in the address table"
self.addr_timer.cancel()
self.timeout()
def on_start(self, event):
self.timer = event.reactor.schedule(10.0, Timeout(self))
        # Create three receiver connections
self.receiver1_conn = event.container.connect(self.receiver1_host)
self.receiver2_conn = event.container.connect(self.receiver2_host)
self.receiver3_conn = event.container.connect(self.receiver3_host)
# Create all 3 receivers first.
self.receiver1 = event.container.create_receiver(self.receiver1_conn,
self.address)
self.receiver2 = event.container.create_receiver(self.receiver2_conn,
self.address)
self.receiver3 = event.container.create_receiver(self.receiver3_conn,
self.address)
self.container = event.container
self.addr_timer = event.reactor.schedule(1.0, AddrTimer(self))
def on_sendable(self, event):
if self.n_sent < self.count:
msg = Message(body="Message %d" % self.n_sent)
msg.correlation_id = self.n_sent
self.sender.send(msg)
self.n_sent += 1
def on_message(self, event):
if event.receiver == self.receiver1:
if self.recvd1_msgs.get(event.message.correlation_id):
self.dup_msg = event.message.correlation_id
self.receiver_name = "Receiver 1"
self.timeout()
self.n_rcvd1 += 1
self.recvd1_msgs[
event.message.correlation_id] = event.message.correlation_id
event.delivery.settle()
if event.receiver == self.receiver2:
if self.recvd2_msgs.get(event.message.correlation_id):
self.dup_msg = event.message.correlation_id
self.receiver_name = "Receiver 2"
self.timeout()
self.n_rcvd2 += 1
self.recvd2_msgs[
event.message.correlation_id] = event.message.correlation_id
event.delivery.settle()
if event.receiver == self.receiver3:
if self.recvd3_msgs.get(event.message.correlation_id):
self.dup_msg = event.message.correlation_id
self.receiver_name = "Receiver 3"
self.timeout()
self.n_rcvd3 += 1
self.recvd3_msgs[
event.message.correlation_id] = event.message.correlation_id
event.delivery.settle()
def on_settled(self, event):
if self.n_rcvd1 + self.n_rcvd2 + self.n_rcvd3 == self.count and \
                self.n_rcvd2 != 0 and self.n_rcvd3 != 0:
self.timer.cancel()
self.receiver1_conn.close()
self.receiver2_conn.close()
self.receiver3_conn.close()
self.sender_conn.close()
def on_released(self, event):
self.n_released += 1
def run(self):
Container(self).run()
class EdgeListenerSender(TestCase):
inter_router_port = None
@classmethod
def setUpClass(cls):
super(EdgeListenerSender, cls).setUpClass()
def router(name, mode, connection, extra=None):
config = [
('router', {'mode': mode, 'id': name}),
('address',
{'prefix': 'multicast', 'distribution': 'multicast'}),
connection
]
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
edge_port_A = cls.tester.get_port()
router('INT.A', 'interior', ('listener', {'role': 'edge', 'port': edge_port_A}))
cls.routers[0].wait_ports()
# Without the fix for DISPATCH-1492, this test will fail because
# of the router crash.
def test_edge_listener_sender_crash_DISPATCH_1492(self):
addr = self.routers[0].addresses[0]
blocking_connection = BlockingConnection(addr)
blocking_sender = blocking_connection.create_sender(address="multicast")
        self.assertIsNotNone(blocking_sender)
if __name__ == '__main__':
unittest.main(main_module())
| 43.180656
| 127
| 0.538302
|
ab51374628545012ffa679a7f81ff466cdacf8ab
| 7,160
|
py
|
Python
|
heron/tools/tracker/src/python/topology.py
|
huijunwu/heron
|
4f7f90f2b823dc7e714ee2898b033cfc78f88cb0
|
[
"Apache-2.0"
] | 1
|
2020-09-07T17:27:56.000Z
|
2020-09-07T17:27:56.000Z
|
heron/tools/tracker/src/python/topology.py
|
windhamwong/incubator-heron
|
f764d9ad48c323f9980445431788503fe2100812
|
[
"Apache-2.0"
] | null | null | null |
heron/tools/tracker/src/python/topology.py
|
windhamwong/incubator-heron
|
f764d9ad48c323f9980445431788503fe2100812
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' topology.py '''
import traceback
import uuid
from heron.common.src.python.utils.log import Log
from heronpy.api import api_constants
# pylint: disable=too-many-instance-attributes
class Topology:
"""
Class Topology
Contains all the relevant information about
a topology that its state manager has.
All this info is fetched from state manager in one go.
The watches are the callbacks that are called
when there is any change in the topology
instance using set_physical_plan, set_execution_state,
set_tmaster, and set_scheduler_location. Any other means of changing will
not call the watches.
"""
def __init__(self, name, state_manager_name):
self.zone = None
self.name = name
self.state_manager_name = state_manager_name
self.physical_plan = None
self.packing_plan = None
self.execution_state = None
self.id = None
self.cluster = None
self.environ = None
self.tmaster = None
self.scheduler_location = None
# A map from UUIDs to the callback
# functions.
self.watches = {}
def register_watch(self, callback):
"""
Returns the UUID with which the watch is
registered. This UUID can be used to unregister
the watch.
Returns None if watch could not be registered.
The argument 'callback' must be a function that takes
exactly one argument, the topology on which
the watch was triggered.
Note that the watch will be unregistered in case
it raises any Exception the first time.
This callback is also called at the time
of registration.
"""
RETRY_COUNT = 5
# Retry in case UID is previously
# generated, just in case...
for _ in range(RETRY_COUNT):
# Generate a random UUID.
uid = uuid.uuid4()
if uid not in self.watches:
Log.info("Registering a watch with uid: " + str(uid))
try:
callback(self)
except Exception as e:
Log.error("Caught exception while triggering callback: " + str(e))
Log.debug(traceback.format_exc())
return None
self.watches[uid] = callback
return uid
return None
def unregister_watch(self, uid):
"""
Unregister the watch with the given UUID.
"""
# Do not raise an error if UUID is
# not present in the watches.
Log.info("Unregister a watch with uid: " + str(uid))
self.watches.pop(uid, None)
def trigger_watches(self):
"""
Call all the callbacks.
If any callback raises an Exception,
unregister the corresponding watch.
"""
to_remove = []
for uid, callback in list(self.watches.items()):
try:
callback(self)
except Exception as e:
Log.error("Caught exception while triggering callback: " + str(e))
Log.debug(traceback.format_exc())
to_remove.append(uid)
for uid in to_remove:
self.unregister_watch(uid)
def set_physical_plan(self, physical_plan):
""" set physical plan """
if not physical_plan:
self.physical_plan = None
self.id = None
else:
self.physical_plan = physical_plan
self.id = physical_plan.topology.id
self.trigger_watches()
def set_packing_plan(self, packing_plan):
""" set packing plan """
if not packing_plan:
self.packing_plan = None
self.id = None
else:
self.packing_plan = packing_plan
self.id = packing_plan.id
self.trigger_watches()
# pylint: disable=no-self-use
def get_execution_state_dc_environ(self, execution_state):
"""
Helper function to extract dc and environ from execution_state.
Returns a tuple (cluster, environ).
"""
return (execution_state.cluster, execution_state.environ)
def set_execution_state(self, execution_state):
""" set exectuion state """
if not execution_state:
self.execution_state = None
self.cluster = None
self.environ = None
else:
self.execution_state = execution_state
cluster, environ = self.get_execution_state_dc_environ(execution_state)
self.cluster = cluster
self.environ = environ
self.zone = cluster
self.trigger_watches()
def set_tmaster(self, tmaster):
""" set exectuion state """
self.tmaster = tmaster
self.trigger_watches()
def set_scheduler_location(self, scheduler_location):
""" set exectuion state """
self.scheduler_location = scheduler_location
self.trigger_watches()
def num_instances(self):
"""
Number of spouts + bolts
"""
num = 0
# Get all the components
components = self.spouts() + self.bolts()
    # Sum the configured parallelism of each component
for component in components:
config = component.comp.config
for kvs in config.kvs:
if kvs.key == api_constants.TOPOLOGY_COMPONENT_PARALLELISM:
num += int(kvs.value)
break
return num
def spouts(self):
"""
Returns a list of Spout (proto) messages
"""
if self.physical_plan:
return list(self.physical_plan.topology.spouts)
return []
def spout_names(self):
"""
Returns a list of names of all the spouts
"""
return [component.comp.name for component in self.spouts()]
def bolts(self):
"""
Returns a list of Bolt (proto) messages
"""
if self.physical_plan:
return list(self.physical_plan.topology.bolts)
return []
def bolt_names(self):
"""
Returns a list of names of all the bolts
"""
return [component.comp.name for component in self.bolts()]
def get_machines(self):
"""
Get all the machines that this topology is running on.
These are the hosts of all the stmgrs.
"""
if self.physical_plan:
stmgrs = list(self.physical_plan.stmgrs)
return [s.host_name for s in stmgrs]
return []
def get_status(self):
"""
Get the current state of this topology.
The state values come from topology.proto:
RUNNING = 1, PAUSED = 2, KILLED = 3.
For any other value (including None), "Unknown" is returned.
"""
status = None
if self.physical_plan and self.physical_plan.topology:
status = self.physical_plan.topology.state
if status == 1:
return "Running"
if status == 2:
return "Paused"
if status == 3:
return "Killed"
return "Unknown"
| 28.64
| 77
| 0.671369
|
989b3661bd7506b4def6b534b0439e72679ac3be
| 1,111
|
py
|
Python
|
ndefcdf/config.py
|
agama-point/ndefcdf
|
79bc7b30c32e1d8f2ed59f625a4b35be68cace08
|
[
"Apache-2.0"
] | null | null | null |
ndefcdf/config.py
|
agama-point/ndefcdf
|
79bc7b30c32e1d8f2ed59f625a4b35be68cace08
|
[
"Apache-2.0"
] | null | null | null |
ndefcdf/config.py
|
agama-point/ndefcdf
|
79bc7b30c32e1d8f2ed59f625a4b35be68cace08
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 Petr Kracik
from ndef.record import Record, GlobalRecord
from io import BytesIO
import json
class CDFConfig(GlobalRecord):
_type = 'cdf/cfg'
def __init__(self, value=None):
self._config = value
if not self._config:
self._config = {}
self._config['v'] = 1
self._config['key'] = "defaultkey"
self._config['config'] = {}
def __str__(self):
return ("NDEF CDFConfig: Version: {} Key: {} Config: {}".format(self.version, self.key, self.config))
def __repr__(self):
return self.__str__()
@property
def version(self):
return self._config['v']
@property
def key(self):
return self._config['key']
@property
def config(self):
return self._config['config']
def _encode_payload(self):
return json.dumps(self._config)
@classmethod
def _decode_payload(cls, octets, errors):
stream = BytesIO(octets)
data = stream.read()
return cls(json.loads(data))
Record.register_type(CDFConfig)
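# Minimal sketch that round-trips a CDFConfig record through its JSON payload
# using only the methods defined above; it bypasses the full ndef message
# encoder/decoder, so treat it as illustrative rather than canonical usage.
if __name__ == "__main__":
    cfg = CDFConfig()                                 # defaults: v=1, key="defaultkey"
    payload = cfg._encode_payload().encode("utf-8")   # JSON payload as bytes
    restored = CDFConfig._decode_payload(payload, errors="strict")
    print(restored)                                   # same version/key/config as cfg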
| 20.2
| 109
| 0.594959
|
22530598430d96c16e808a7fd43d37607bccbae5
| 242
|
py
|
Python
|
src/settings.py
|
carlos-ft/note
|
dc6b1c860a8dd5c1a4b88bff508a21d6172031e6
|
[
"MIT"
] | null | null | null |
src/settings.py
|
carlos-ft/note
|
dc6b1c860a8dd5c1a4b88bff508a21d6172031e6
|
[
"MIT"
] | null | null | null |
src/settings.py
|
carlos-ft/note
|
dc6b1c860a8dd5c1a4b88bff508a21d6172031e6
|
[
"MIT"
] | null | null | null |
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
DEBUG = True
# Application definition
#REDIS
REDIS_HOST = 'note-redis'
REDIS_PORT = 6379
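# These values are typically consumed by a Redis client elsewhere in the
# project. A minimal sketch with the third-party redis-py package (an
# assumption; the package is not imported here) would be:
#
#   import redis
#   client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
#   client.ping()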
| 18.615385
| 64
| 0.760331
|
a884f2b09d4ff0bbbbd5006457522ffcf5d4e5eb
| 2,141
|
py
|
Python
|
tests/unit/entities/test_process_entity.py
|
slyons/pyapacheatlas
|
48b0239dfc8c5624cf5e86b5dfd29a6e58f70280
|
[
"MIT"
] | 2
|
2021-01-28T20:31:54.000Z
|
2021-01-28T20:32:28.000Z
|
tests/unit/entities/test_process_entity.py
|
slyons/pyapacheatlas
|
48b0239dfc8c5624cf5e86b5dfd29a6e58f70280
|
[
"MIT"
] | null | null | null |
tests/unit/entities/test_process_entity.py
|
slyons/pyapacheatlas
|
48b0239dfc8c5624cf5e86b5dfd29a6e58f70280
|
[
"MIT"
] | null | null | null |
from pyapacheatlas.core.entity import AtlasEntity, AtlasProcess
def test_null_io():
p = AtlasProcess(name="test", typeName="Process", qualified_name="test",
inputs=None, outputs=None
)
assert(p.attributes["inputs"] is None)
assert(p.attributes["outputs"] is None)
d = p.to_json()
assert(d["attributes"]["inputs"] is None)
assert(d["attributes"]["outputs"] is None)
def test_setting_mixed():
e1 = AtlasEntity(name="e1", typeName="DataSet",
qualified_name="e1", guid=-1)
e2 = AtlasEntity(name="e2", typeName="DataSet",
qualified_name="e2", guid=-2)
p = AtlasProcess(name="test", typeName="Process", qualified_name="test",
inputs=[e1], outputs=[e2.to_json(minimum=True)]
)
assert(len(p.attributes["inputs"]) == 1)
assert(len(p.attributes["outputs"]) == 1)
assert(isinstance(p.attributes["inputs"][0], dict))
assert(isinstance(p.attributes["outputs"][0], dict))
# Should only have the minimum attributes necessary (3)
assert(all(len(v) == 3 for v in p.attributes["inputs"]))
assert(all(len(v) == 3 for v in p.attributes["outputs"]))
def test_adding_later():
e1 = AtlasEntity(name="e1", typeName="DataSet",
qualified_name="e1", guid=-1)
e2 = AtlasEntity(name="e2", typeName="DataSet",
qualified_name="e2", guid=-2)
e3 = AtlasEntity(name="e3", typeName="DataSet",
qualified_name="e3", guid=-2)
p = AtlasProcess(name="test", typeName="Process", qualified_name="test",
inputs=[], outputs=[]
)
p.addInput(e1)
p.addOutput(e2, e3.to_json(minimum=True))
assert(len(p.inputs) == 1)
assert(isinstance(p.attributes["inputs"][0], dict))
assert(len(p.inputs) == 1)
assert(all( [isinstance(e, dict) for e in p.attributes["inputs"]]))
# Should only have the minimum attributes necessary (3)
assert(all(len(v) == 3 for v in p.attributes["inputs"]))
assert(all(len(v) == 3 for v in p.attributes["outputs"]))
| 39.648148
| 76
| 0.59645
|
eb1a75a610a79f05b8fb7bbd69dee55534bde777
| 1,393
|
py
|
Python
|
BingGrabber.py
|
SuperChamp234/WebSearcher
|
c3d86ef508742678083a42c166c9c52bcc4e9065
|
[
"MIT"
] | 1
|
2020-08-03T15:09:13.000Z
|
2020-08-03T15:09:13.000Z
|
BingGrabber.py
|
ZainSiddavatam/WebSearcher
|
c3d86ef508742678083a42c166c9c52bcc4e9065
|
[
"MIT"
] | null | null | null |
BingGrabber.py
|
ZainSiddavatam/WebSearcher
|
c3d86ef508742678083a42c166c9c52bcc4e9065
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
import time
import csv
from itertools import zip_longest
description = []
Links = []
searchstring = "Anything you Like HERE"
browser = webdriver.Chrome(executable_path="chromedriver")
url = "https://www.bing.com"
browser.get(url)
browser.maximize_window()
search_bar = browser.find_element_by_xpath("//*[@id='sb_form_q']")
search_bar.send_keys(searchstring)
search_button = browser.find_element_by_xpath("//*[@id='sb_form']/label")
search_button.click()
time.sleep(2)
for i in range(1,11):
time.sleep(2)
browser.execute_script("window.scrollTo(0, document.body.scrollHeight);")
search_results = browser.find_elements_by_xpath("//*[@id='b_results']/li[@class = 'b_algo']/h2/a")
for result in search_results:
description.append(str(result.text))
urls = []
for result in search_results:
urls.append(result.get_attribute("href"))
for url in urls:
Links.append(str(url))
nextpage_button = browser.find_element_by_xpath('//*[@id="b_results"]/li/nav/ul/li/a[@class="sb_pagN sb_pagN_bp b_widePag sb_bp "]')
nextpage_button.click()
browser.quit()
d = [description, Links]
export_data = zip_longest(*d, fillvalue = '')
with open('binglinks.csv', 'w', newline='') as myfile:
wr = csv.writer(myfile)
wr.writerow(("Description", "Links"))
wr.writerows(export_data)
| 32.395349
| 136
| 0.703518
|
4d6eb1ad582dba2508ce3587e508fb6e600f018e
| 16,170
|
py
|
Python
|
pydatastructs/trees/heaps.py
|
RidhimaKohli/pydatastructs
|
d5df69067ff4bb654237c9321d69eb7a2d35be9b
|
[
"BSD-3-Clause"
] | 1
|
2020-04-13T11:24:43.000Z
|
2020-04-13T11:24:43.000Z
|
pydatastructs/trees/heaps.py
|
Saptashrungi/pydatastructs
|
d575846a7bd185b4dc2a6bc929296d4bb1d9c756
|
[
"BSD-3-Clause"
] | null | null | null |
pydatastructs/trees/heaps.py
|
Saptashrungi/pydatastructs
|
d575846a7bd185b4dc2a6bc929296d4bb1d9c756
|
[
"BSD-3-Clause"
] | null | null | null |
from pydatastructs.utils.misc_util import _check_type, NoneType, TreeNode, BinomialTreeNode
from pydatastructs.linear_data_structures.arrays import (ArrayForTrees,
DynamicOneDimensionalArray, Array)
from pydatastructs.miscellaneous_data_structures.binomial_trees import BinomialTree
__all__ = [
'BinaryHeap',
'TernaryHeap',
'DHeap',
'BinomialHeap'
]
class Heap(object):
"""
Abstract class for representing heaps.
"""
pass
class DHeap(Heap):
"""
Represents D-ary Heap.
Parameters
==========
elements : list, tuple, Array
Optional, by default 'None'.
list/tuple/Array of initial TreeNode in Heap.
heap_property : str
If the key stored in each node is
either greater than or equal to
the keys in the node's children
then pass 'max'.
If the key stored in each node is
either less than or equal to
the keys in the node's children
then pass 'min'.
By default, the heap property is
set to 'min'.
Examples
========
>>> from pydatastructs.trees.heaps import DHeap
>>> min_heap = DHeap(heap_property="min", d=3)
>>> min_heap.insert(1, 1)
>>> min_heap.insert(5, 5)
>>> min_heap.insert(7, 7)
>>> min_heap.extract().key
1
>>> min_heap.insert(4, 4)
>>> min_heap.extract().key
4
>>> max_heap = DHeap(heap_property='max', d=2)
>>> max_heap.insert(1, 1)
>>> max_heap.insert(5, 5)
>>> max_heap.insert(7, 7)
>>> max_heap.extract().key
7
>>> max_heap.insert(6, 6)
>>> max_heap.extract().key
6
References
==========
.. [1] https://en.wikipedia.org/wiki/D-ary_heap
"""
__slots__ = ['_comp', 'heap', 'd', 'heap_property', '_last_pos_filled']
def __new__(cls, elements=None, heap_property="min", d=4):
obj = Heap.__new__(cls)
obj.heap_property = heap_property
obj.d = d
if heap_property == "min":
obj._comp = lambda key_parent, key_child: key_parent <= key_child
elif heap_property == "max":
obj._comp = lambda key_parent, key_child: key_parent >= key_child
else:
raise ValueError("%s is invalid heap property"%(heap_property))
if elements is None:
elements = DynamicOneDimensionalArray(TreeNode, 0)
elif _check_type(elements, (list,tuple)):
elements = DynamicOneDimensionalArray(TreeNode, len(elements), elements)
elif _check_type(elements, Array):
elements = DynamicOneDimensionalArray(TreeNode, len(elements), elements._data)
else:
raise ValueError(f'Expected a list/tuple/Array of TreeNode, got {type(elements)}')
obj.heap = elements
obj._last_pos_filled = obj.heap._last_pos_filled
obj._build()
return obj
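# _build first records, for every node, the index range of its children
# (d*i + 1 .. d*i + d for node i) and then restores the heap property
# bottom-up by sifting down every potential internal node, which costs
# O(n) overall.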
def _build(self):
for i in range(self._last_pos_filled + 1):
self.heap[i]._leftmost, self.heap[i]._rightmost = \
self.d*i + 1, self.d*i + self.d
for i in range((self._last_pos_filled + 1)//self.d, -1, -1):
self._heapify(i)
def _swap(self, idx1, idx2):
idx1_key, idx1_data = \
self.heap[idx1].key, self.heap[idx1].data
self.heap[idx1].key, self.heap[idx1].data = \
self.heap[idx2].key, self.heap[idx2].data
self.heap[idx2].key, self.heap[idx2].data = \
idx1_key, idx1_data
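# _heapify is the standard d-ary sift-down: starting at index i it selects the
# best child among positions d*i + 1 .. d*i + d (smallest key for a min-heap,
# largest for a max-heap), swaps if that child beats the current node, and
# repeats until the heap property holds, i.e. O(d) comparisons per level for
# at most log_d(n) levels.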
def _heapify(self, i):
while True:
target = i
l = self.d*i + 1
r = self.d*i + self.d
for j in range(l, r+1):
if j <= self._last_pos_filled:
target = j if self._comp(self.heap[j].key, self.heap[target].key) \
else target
else:
break
if target != i:
self._swap(target, i)
i = target
else:
break
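# insert appends the new node at the end of the array and sifts it up: while
# the node beats its parent at index (i - 1) // d the two are swapped, so at
# most log_d(n) swaps are performed.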
def insert(self, key, data):
"""
Insert a new element to the heap according to heap property.
Parameters
==========
key
The key for comparison.
data
The data to be inserted.
Returns
=======
None
"""
new_node = TreeNode(key, data)
self.heap.append(new_node)
self._last_pos_filled += 1
i = self._last_pos_filled
self.heap[i]._leftmost, self.heap[i]._rightmost = self.d*i + 1, self.d*i + self.d
while True:
parent = (i - 1)//self.d
if i == 0 or self._comp(self.heap[parent].key, self.heap[i].key):
break
else:
self._swap(i, parent)
i = parent
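# extract swaps the root with the last filled slot, deletes that slot, and
# then sifts the new root down via _heapify(0) to restore the heap property.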
def extract(self):
"""
Extract root element of the Heap.
Returns
=======
root_element : TreeNode
The TreeNode at the root of the heap,
if the heap is not empty.
Raises
======
IndexError
If the heap is empty.
"""
if self._last_pos_filled == -1:
raise IndexError("Heap is empty.")
else:
element_to_be_extracted = TreeNode(self.heap[0].key, self.heap[0].data)
self._swap(0, self._last_pos_filled)
self.heap.delete(self._last_pos_filled)
self._last_pos_filled -= 1
self._heapify(0)
return element_to_be_extracted
def __str__(self):
to_be_printed = ['' for i in range(self._last_pos_filled + 1)]
for i in range(self._last_pos_filled + 1):
node = self.heap[i]
if node._leftmost <= self._last_pos_filled:
if node._rightmost <= self._last_pos_filled:
children = [x for x in range(node._leftmost, node._rightmost + 1)]
else:
children = [x for x in range(node._leftmost, self._last_pos_filled + 1)]
else:
children = []
to_be_printed[i] = (node.key, node.data, children)
return str(to_be_printed)
@property
def is_empty(self):
"""
Checks if the heap is empty.
"""
return self.heap._last_pos_filled == -1
class BinaryHeap(DHeap):
"""
Represents Binary Heap.
Parameters
==========
elements : list, tuple
Optional, by default 'None'.
List/tuple of initial elements in Heap.
heap_property : str
If the key stored in each node is
either greater than or equal to
the keys in the node's children
then pass 'max'.
If the key stored in each node is
either less than or equal to
the keys in the node's children
then pass 'min'.
By default, the heap property is
set to 'min'.
Examples
========
>>> from pydatastructs.trees.heaps import BinaryHeap
>>> min_heap = BinaryHeap(heap_property="min")
>>> min_heap.insert(1, 1)
>>> min_heap.insert(5, 5)
>>> min_heap.insert(7, 7)
>>> min_heap.extract().key
1
>>> min_heap.insert(4, 4)
>>> min_heap.extract().key
4
>>> max_heap = BinaryHeap(heap_property='max')
>>> max_heap.insert(1, 1)
>>> max_heap.insert(5, 5)
>>> max_heap.insert(7, 7)
>>> max_heap.extract().key
7
>>> max_heap.insert(6, 6)
>>> max_heap.extract().key
6
References
==========
.. [1] https://en.m.wikipedia.org/wiki/Binary_heap
"""
def __new__(cls, elements=None, heap_property="min"):
obj = DHeap.__new__(cls, elements, heap_property, 2)
return obj
class TernaryHeap(DHeap):
"""
Represents Ternary Heap.
Parameters
==========
elements : list, tuple
Optional, by default 'None'.
List/tuple of initial elements in Heap.
heap_property : str
If the key stored in each node is
either greater than or equal to
the keys in the node's children
then pass 'max'.
If the key stored in each node is
either less than or equal to
the keys in the node's children
then pass 'min'.
By default, the heap property is
set to 'min'.
Examples
========
>>> from pydatastructs.trees.heaps import TernaryHeap
>>> min_heap = TernaryHeap(heap_property="min")
>>> min_heap.insert(1, 1)
>>> min_heap.insert(5, 5)
>>> min_heap.insert(7, 7)
>>> min_heap.insert(3, 3)
>>> min_heap.extract().key
1
>>> min_heap.insert(4, 4)
>>> min_heap.extract().key
3
>>> max_heap = TernaryHeap(heap_property='max')
>>> max_heap.insert(1, 1)
>>> max_heap.insert(5, 5)
>>> max_heap.insert(7, 7)
>>> max_heap.insert(3, 3)
>>> max_heap.extract().key
7
>>> max_heap.insert(6, 6)
>>> max_heap.extract().key
6
References
==========
.. [1] https://en.wikipedia.org/wiki/D-ary_heap
.. [2] https://ece.uwaterloo.ca/~dwharder/aads/Algorithms/d-ary_heaps/Ternary_heaps/
"""
def __new__(cls, elements=None, heap_property="min"):
obj = DHeap.__new__(cls, elements, heap_property, 3)
return obj
class BinomialHeap(Heap):
"""
Represents binomial heap.
Parameters
==========
root_list: list/tuple/Array
By default, []
The list of BinomialTree object references
in sorted order.
Examples
========
>>> from pydatastructs import BinomialHeap
>>> b = BinomialHeap()
>>> b.insert(1, 1)
>>> b.insert(2, 2)
>>> b.find_minimum().key
1
>>> b.find_minimum().children[0].key
2
References
==========
.. [1] https://en.wikipedia.org/wiki/Binomial_heap
"""
__slots__ = ['root_list']
def __new__(cls, root_list=[]):
if not all((_check_type(root, BinomialTree))
for root in root_list):
raise TypeError("The root_list should contain "
"references to objects of BinomialTree.")
obj = Heap.__new__(cls)
obj.root_list = root_list
return obj
def merge_tree(self, tree1, tree2):
"""
Merges two BinomialTree objects.
Parameters
==========
tree1: BinomialTree
tree2: BinomialTree
"""
if (not _check_type(tree1, BinomialTree)) or \
(not _check_type(tree2, BinomialTree)):
raise TypeError("Both the trees should be of type "
"BinomalTree.")
ret_value = None
if tree1.root.key <= tree2.root.key:
tree1.add_sub_tree(tree2)
ret_value = tree1
else:
tree2.add_sub_tree(tree1)
ret_value = tree2
return ret_value
def _merge_heap_last_new_tree(self, new_root_list, new_tree):
"""
Merges last tree node in root list with the incoming tree.
"""
pos = -1
if len(new_root_list) > 0 and new_root_list[pos].order == new_tree.order:
new_root_list[pos] = self.merge_tree(new_root_list[pos], new_tree)
else:
new_root_list.append(new_tree)
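# merge combines the two sorted root lists much like binary addition: trees of
# equal order are linked into one tree of the next order via merge_tree (the
# "carry"), and _merge_heap_last_new_tree resolves a carry that collides with
# the last tree already placed in the result list.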
def merge(self, other_heap):
"""
Merges current binomial heap with the given binomial heap.
Parameters
==========
other_heap: BinomialHeap
"""
if not _check_type(other_heap, BinomialHeap):
raise TypeError("Other heap is not of type BinomialHeap.")
new_root_list = []
i, j = 0, 0
while (i < len(self.root_list)) and \
(j < len(other_heap.root_list)):
new_tree = None
while self.root_list[i] is None:
i += 1
while other_heap.root_list[j] is None:
j += 1
if self.root_list[i].order == other_heap.root_list[j].order:
new_tree = self.merge_tree(self.root_list[i],
other_heap.root_list[j])
i += 1
j += 1
else:
if self.root_list[i].order < other_heap.root_list[j].order:
new_tree = self.root_list[i]
i += 1
else:
new_tree = other_heap.root_list[j]
j += 1
self._merge_heap_last_new_tree(new_root_list, new_tree)
while i < len(self.root_list):
new_tree = self.root_list[i]
self._merge_heap_last_new_tree(new_root_list, new_tree)
i += 1
while j < len(other_heap.root_list):
new_tree = other_heap.root_list[j]
self._merge_heap_last_new_tree(new_root_list, new_tree)
j += 1
self.root_list = new_root_list
def insert(self, key, data):
"""
Inserts new node with the given key and data.
key
The key of the node which can be operated
upon by relational operators.
data
The data to be stored in the new node.
"""
new_node = BinomialTreeNode(key, data)
new_tree = BinomialTree(root=new_node, order=0)
new_heap = BinomialHeap(root_list=[new_tree])
self.merge(new_heap)
def find_minimum(self, **kwargs):
"""
Finds the node with the minimum key.
Returns
=======
min_node: BinomialTreeNode
"""
if self.is_empty:
raise IndexError("Binomial heap is empty.")
min_node = None
idx, min_idx = 0, None
for tree in self.root_list:
if ((min_node is None) or
(tree is not None and tree.root is not None and
min_node.key > tree.root.key)):
min_node = tree.root
min_idx = idx
idx += 1
if kwargs.get('get_index', None) is not None:
return min_node, min_idx
return min_node
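# delete_minimum removes the tree whose root holds the minimum key, turns each
# child of that root into a stand-alone binomial tree, and merges the
# resulting heap back into this one.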
def delete_minimum(self):
"""
Deletes the node with minimum key.
"""
min_node, min_idx = self.find_minimum(get_index=True)
child_root_list = []
for k, child in enumerate(min_node.children):
if child is not None:
child_root_list.append(BinomialTree(root=child, order=k))
self.root_list.remove(self.root_list[min_idx])
child_heap = BinomialHeap(root_list=child_root_list)
self.merge(child_heap)
@property
def is_empty(self):
return len(self.root_list) == 0
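# decrease_key lowers a node's key and then bubbles the key/data pair up by
# swapping values with the parent while the parent's key is larger; the tree
# nodes themselves are never relinked.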
def decrease_key(self, node, new_key):
"""
Decreases the key of the given node.
Parameters
==========
node: BinomialTreeNode
The node whose key is to be reduced.
new_key
The new key of the given node,
should be less than the current key.
"""
if node.key <= new_key:
raise ValueError("The new key "
"should be less than current node's key.")
node.key = new_key
while ((not node.is_root) and
(node.parent.key > node.key)):
node.parent.key, node.key = \
node.key, node.parent.key
node.parent.data, node.data = \
node.data, node.parent.data
node = node.parent
def delete(self, node):
"""
Deletes the given node.
Parameters
==========
node: BinomialTreeNode
The node which is to be deleted.
"""
self.decrease_key(node, self.find_minimum().key - 1)
self.delete_minimum()
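# Minimal usage sketch exercising only the public BinomialHeap API defined
# above; the printed keys are the expected results, assuming the upstream
# implementation behaves as documented.
if __name__ == "__main__":
    _demo = BinomialHeap()
    for _key in (5, 3, 8, 1):
        _demo.insert(_key, str(_key))
    print(_demo.find_minimum().key)   # expected: 1
    _demo.delete_minimum()
    print(_demo.find_minimum().key)   # expected: 3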
| 30.111732
| 94
| 0.534199
|
007ad5fb9cdcb8632b2ea2f1bb7ba2617ae1e387
| 671
|
py
|
Python
|
SG/pipeline/old/laplacian.py
|
BiCroLab/WSI-analysis
|
9f55a7a5296d006f2da8adfb2fe6a22eebe3dc42
|
[
"MIT"
] | null | null | null |
SG/pipeline/old/laplacian.py
|
BiCroLab/WSI-analysis
|
9f55a7a5296d006f2da8adfb2fe6a22eebe3dc42
|
[
"MIT"
] | null | null | null |
SG/pipeline/old/laplacian.py
|
BiCroLab/WSI-analysis
|
9f55a7a5296d006f2da8adfb2fe6a22eebe3dc42
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
import numpy as np
import sys
import umap
import warnings
from scipy import sparse
import networkx as nx
warnings.filterwarnings('ignore')
import seaborn as sns
sns.set()
import matplotlib.pyplot as plt
from sklearn.preprocessing import normalize
import pandas as pd
############################################
W = sparse.load_npz(sys.argv[1]) # adj.npz
npyfilename = sys.argv[2] # 'localdata.npy'
localdata = np.load(npyfilename,allow_pickle=True)
L = nx.laplacian_matrix(nx.from_scipy_sparse_matrix(W,edge_attribute='weight'))
del W
laplace = L.dot(localdata)
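# Row i of L @ X equals degree(i) * X[i, :] minus the weighted sum of the
# neighbours' rows (L = D - W), i.e. how much each node's local features
# deviate from their weighted neighbourhood.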
print(laplace.shape)
np.save(npyfilename+'.laplacian',laplace)
| 23.964286
| 80
| 0.727273
|