Dataset schema (one row per source file):

hexsha: stringlengths 40..40
size: int64 4..1.02M
ext: stringclasses 8 values
lang: stringclasses 1 value
max_stars_repo_path: stringlengths 4..209
max_stars_repo_name: stringlengths 5..121
max_stars_repo_head_hexsha: stringlengths 40..40
max_stars_repo_licenses: listlengths 1..10
max_stars_count: int64 1..191k ⌀
max_stars_repo_stars_event_min_datetime: stringlengths 24..24 ⌀
max_stars_repo_stars_event_max_datetime: stringlengths 24..24 ⌀
max_issues_repo_path: stringlengths 4..209
max_issues_repo_name: stringlengths 5..121
max_issues_repo_head_hexsha: stringlengths 40..40
max_issues_repo_licenses: listlengths 1..10
max_issues_count: int64 1..67k ⌀
max_issues_repo_issues_event_min_datetime: stringlengths 24..24 ⌀
max_issues_repo_issues_event_max_datetime: stringlengths 24..24 ⌀
max_forks_repo_path: stringlengths 4..209
max_forks_repo_name: stringlengths 5..121
max_forks_repo_head_hexsha: stringlengths 40..40
max_forks_repo_licenses: listlengths 1..10
max_forks_count: int64 1..105k ⌀
max_forks_repo_forks_event_min_datetime: stringlengths 24..24 ⌀
max_forks_repo_forks_event_max_datetime: stringlengths 24..24 ⌀
content: stringlengths 4..1.02M
avg_line_length: float64 1.07..66.1k
max_line_length: int64 4..266k
alphanum_fraction: float64 0.01..1
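
The columns above describe one dataset row per source file. As a minimal, hypothetical sketch of how such rows could be inspected (the dataset name below is a placeholder, not given in this dump):

from datasets import load_dataset

# Hypothetical sketch: stream one row of a dataset with the schema listed above.
# "namespace/this-code-dataset" is a placeholder, not taken from this dump.
ds = load_dataset("namespace/this-code-dataset", split="train", streaming=True)
row = next(iter(ds))
print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
print(row["avg_line_length"], row["max_line_length"], row["alphanum_fraction"])
print(row["content"][:200])  # first 200 characters of the stored source file
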
hexsha: 9df3952ad5a0ab92e2449d6885e6beaeddd68099 | size: 5,847 | ext: py | lang: Python
max_stars_repo: tools/old/rep_train.py @ XYHC-MMDA/Multi-modal-Multi-task-DA (head ed8297eb489d50c580795713cccb72bc958f406f) | licenses: ["Apache-2.0"] | max_stars_count: 1 | stars events: 2020-11-05T19:51:23.000Z to 2020-11-05T19:51:23.000Z
max_issues_repo: tools/old/rep_train.py @ XYHC-MMDA/Multi-modal-Multi-task-DA (head ed8297eb489d50c580795713cccb72bc958f406f) | licenses: ["Apache-2.0"] | max_issues_count: null | issues events: null
max_forks_repo: tools/old/rep_train.py @ XYHC-MMDA/Multi-modal-Multi-task-DA (head ed8297eb489d50c580795713cccb72bc958f406f) | licenses: ["Apache-2.0"] | max_forks_count: null | forks events: null

from __future__ import division
import argparse
import copy
import logging
import mmcv
import os
import time
import torch
from mmcv import Config, DictAction
from mmcv.runner import init_dist
from os import path as osp
from mmdet3d import __version__
from mmdet3d.datasets import build_dataset
from mmdet3d.models import build_detector
from mmdet3d.utils import collect_env, get_root_logger
# from mmdet.apis import set_random_seed, train_detector
from mmdet3d.apis import set_random_seed, rep_train_detector
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work-dir', help='the dir to save logs and models')
parser.add_argument(
'--resume-from', help='the checkpoint file to resume from')
parser.add_argument(
'--no-validate',
action='store_true',
help='whether not to evaluate the checkpoint during training')
group_gpus = parser.add_mutually_exclusive_group()
group_gpus.add_argument(
'--gpus',
type=int,
help='number of gpus to use '
'(only applicable to non-distributed training)')
group_gpus.add_argument(
'--gpu-ids',
type=int,
nargs='+',
help='ids of gpus to use '
'(only applicable to non-distributed training)')
parser.add_argument('--seed', type=int, default=0, help='random seed')
parser.add_argument(
'--deterministic',
action='store_true',
help='whether to set deterministic options for CUDNN backend.')
parser.add_argument(
'--options', nargs='+', action=DictAction, help='arguments in dict')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
parser.add_argument(
'--autoscale-lr',
action='store_true',
help='automatically scale lr with the number of gpus')
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
return args
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if args.options is not None:
cfg.merge_from_dict(args.options)
# set cudnn_benchmark
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
# work_dir is determined in this priority: CLI > segment in file > filename
if args.work_dir is not None:
# update configs according to CLI args if args.work_dir is not None
cfg.work_dir = args.work_dir
elif cfg.get('work_dir', None) is None:
# use config filename as default work_dir if cfg.work_dir is None
cfg.work_dir = osp.join('./work_dirs',
osp.splitext(osp.basename(args.config))[0])
if args.resume_from is not None:
cfg.resume_from = args.resume_from
if args.gpu_ids is not None:
cfg.gpu_ids = args.gpu_ids
else:
cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus)
if args.autoscale_lr:
# apply the linear scaling rule (https://arxiv.org/abs/1706.02677)
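        # e.g. a base lr tuned for 8 GPUs is multiplied by 4/8 = 0.5 when training on 4 GPUs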
cfg.optimizer['lr'] = cfg.optimizer['lr'] * len(cfg.gpu_ids) / 8
# init distributed env first, since logger depends on the dist info.
if args.launcher == 'none':
distributed = False
else:
distributed = True
init_dist(args.launcher, **cfg.dist_params)
# create work_dir
mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
# init the logger before other steps
timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
log_file = osp.join(cfg.work_dir, f'{timestamp}.log')
logger = get_root_logger(log_file=log_file, log_level=cfg.log_level)
# add a logging filter
logging_filter = logging.Filter('mmdet')
logging_filter.filter = lambda record: record.find('mmdet') != -1
# init the meta dict to record some important information such as
# environment info and seed, which will be logged
meta = dict()
# log env info
env_info_dict = collect_env()
env_info = '\n'.join([(f'{k}: {v}') for k, v in env_info_dict.items()])
dash_line = '-' * 60 + '\n'
logger.info('Environment info:\n' + dash_line + env_info + '\n' +
dash_line)
meta['env_info'] = env_info
# log some basic info
logger.info(f'Distributed training: {distributed}')
logger.info(f'Config:\n{cfg.pretty_text}')
# set random seeds
if args.seed is not None:
logger.info(f'Set random seed to {args.seed}, '
f'deterministic: {args.deterministic}')
set_random_seed(args.seed, deterministic=args.deterministic)
cfg.seed = args.seed
meta['seed'] = args.seed
model = build_detector(
cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
logger.info(f'Model:\n{model}')
datasets = [build_dataset(cfg.data.train)]
if len(cfg.workflow) == 2:
val_dataset = copy.deepcopy(cfg.data.val)
val_dataset.pipeline = cfg.data.train.pipeline
datasets.append(build_dataset(val_dataset))
if cfg.checkpoint_config is not None:
# save mmdet version, config file content and class names in
# checkpoints as meta data
cfg.checkpoint_config.meta = dict(
mmdet_version=__version__,
config=cfg.pretty_text,
CLASSES=datasets[0].CLASSES)
# add an attribute for visualization convenience
model.CLASSES = datasets[0].CLASSES
# reproduce; without disc
rep_train_detector(model, datasets, cfg,
distributed=distributed, timestamp=timestamp, meta=meta)
if __name__ == '__main__':
main()

avg_line_length: 35.871166 | max_line_length: 79 | alphanum_fraction: 0.661878
hexsha: 656237a02de658ffaa48195c43173a050ccbf509 | size: 802 | ext: py | lang: Python
max_stars_repo: main.py @ satishnarasimhan/outliers (head 2094281d75d11f7a3925759e8dc19c9404c9a4a7) | licenses: ["MIT"] | max_stars_count: null | stars events: null
max_issues_repo: main.py @ satishnarasimhan/outliers (head 2094281d75d11f7a3925759e8dc19c9404c9a4a7) | licenses: ["MIT"] | max_issues_count: null | issues events: null
max_forks_repo: main.py @ satishnarasimhan/outliers (head 2094281d75d11f7a3925759e8dc19c9404c9a4a7) | licenses: ["MIT"] | max_forks_count: null | forks events: null

# -*- coding: utf-8 -*-
"""
Created on Thu May 20 16:21:46 2021
@author: Satish Narasimhan
This script will generate the list of securities / instruments which have ITM selling
"""
from datetime import datetime
import pandas as pd
# Determine current date
day = datetime.now()
date = day.strftime("%d%m%y")
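# date in ddmmyy form, e.g. '170621' for 17 June 2021, used to build the day's raw file name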
#date = '170621'
print(date)
column_names = []
sourcePath = 'C:\\Users\\Path\\To\\Raw\\Files\\'  # trailing separator so the file name concatenates into a valid path
#Historical: \\Apr-2021\\ .. Raw Files\\
fileName = 'op'+date+'.csv'
csvfile = sourcePath + fileName
df = pd.DataFrame(columns = column_names)
df = pd.read_csv(csvfile)
# PE - ITM Selling
pe_res = df.query("")
# Processed file path
pe_res.to_csv(r'')
print("PE file generated successfully")
# CE -ITM Selling
ce_res = df.query("")
ce_res.to_csv(r'')
print("CE file generated successfully")

avg_line_length: 20.564103 | max_line_length: 85 | alphanum_fraction: 0.698254
hexsha: 48aa1643bc69ebe88754e9067179b99923448f52 | size: 3,801 | ext: py | lang: Python
max_stars_repo: fr_celery/settings.py @ johnbartholomew/bookwyrm (head a6593eced7db88f0a68bd19a0e6ba441bf1053c3) | licenses: ["CC0-1.0"] | max_stars_count: null | stars events: null
max_issues_repo: fr_celery/settings.py @ johnbartholomew/bookwyrm (head a6593eced7db88f0a68bd19a0e6ba441bf1053c3) | licenses: ["CC0-1.0"] | max_issues_count: null | issues events: null
max_forks_repo: fr_celery/settings.py @ johnbartholomew/bookwyrm (head a6593eced7db88f0a68bd19a0e6ba441bf1053c3) | licenses: ["CC0-1.0"] | max_forks_count: null | forks events: null

"""
Django settings for fr_celery project.
Generated by 'django-admin startproject' using Django 3.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
from environs import Env
env = Env()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# celery/rabbitmq
CELERY_BROKER_URL = env('CELERY_BROKER')
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_BACKEND = 'redis'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0a^0gpwjc1ap+lb$dinin=efc@e&_0%102$o3(>9e7lndiaw'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'fr_celery',
'fedireads',
'celery',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'fr_celery.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fr_celery.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
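# env('NAME', default) reads NAME from the environment, falling back to the given default (environs API)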
FEDIREADS_DATABASE_BACKEND = env('FEDIREADS_DATABASE_BACKEND', 'postgres')
FEDIREADS_DBS = {
'postgres': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': env('POSTGRES_DB', 'fedireads'),
'USER': env('POSTGRES_USER', 'fedireads'),
'PASSWORD': env('POSTGRES_PASSWORD', 'fedireads'),
'HOST': env('POSTGRES_HOST', ''),
'PORT': 5432
},
'sqlite': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'fedireads.db')
}
}
DATABASES = {
'default': FEDIREADS_DBS[FEDIREADS_DATABASE_BACKEND]
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'

avg_line_length: 25.857143 | max_line_length: 91 | alphanum_fraction: 0.691397
hexsha: e23e41f85e2518bdd19807285abea1f366214539 | size: 1,391 | ext: py | lang: Python
max_stars_repo: blobinstall.py @ thanm/devel-scripts (head f8777a7c282b4b15f004e22ebf5c6b6751b82f87) | licenses: ["Apache-2.0"] | max_stars_count: null | stars events: null
max_issues_repo: blobinstall.py @ thanm/devel-scripts (head f8777a7c282b4b15f004e22ebf5c6b6751b82f87) | licenses: ["Apache-2.0"] | max_issues_count: null | issues events: null
max_forks_repo: blobinstall.py @ thanm/devel-scripts (head f8777a7c282b4b15f004e22ebf5c6b6751b82f87) | licenses: ["Apache-2.0"] | max_forks_count: 3 | forks events: 2019-05-18T23:01:33.000Z to 2021-09-18T14:17:10.000Z

#!/usr/bin/python3
"""Install a 'blob' file from an extract shell archive.
This script installs the 3rd party blob contained in a previously
downloaded extract-*.sh file. This avoids the need to have to page
through and accept the user agreement (which is what you have to do if
you execute the archive directly).
"""
import locale
import os
import re
import subprocess
import sys
import script_utils as u
#......................................................................
me = sys.argv[0]
mebase = os.path.basename(me)
if len(sys.argv) != 2:
u.error("%s: supply exactly one argument" % mebase)
arg = sys.argv[1]
if not re.compile(r"extract\-.+\.sh$").match(arg):
u.warning("arg '%s' does not match template extract*.sh" % arg)
if not os.path.exists(arg):
u.error("unable to access file arg '%s'" % arg)
u.verbose(0, "... examining '%s'" % arg)
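# The self-extracting archive embeds a line of the form "tail -n +<N> $0 | ... tar zxv ..."
# (illustrative form); we locate it and run it ourselves, substituting the archive path for $0.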
matcher = re.compile(r"tail \-n \+\d+ .+ tar zxv")
cmd = ""
encoding = locale.getdefaultlocale()[1]
with open(arg, "rb") as fin:
for line in fin:
decoded = line.decode(encoding)
if matcher.match(decoded):
# found
cmd = re.sub(r"\$0", arg, decoded.rstrip())
break
if not cmd:
u.error("could not locate tail/tar line with proper form in '%s'" % arg)
u.verbose(0, "... extracting files from '%s'" % arg)
rc = subprocess.call(cmd, shell=True)
if rc != 0:
u.error("error: cmd failed: %s" % cmd)

avg_line_length: 25.759259 | max_line_length: 74 | alphanum_fraction: 0.635514
hexsha: 53637bdbc183e3e28778e907cc8fa64b770fa105 | size: 3,186 | ext: py | lang: Python
max_stars_repo: tests/vfs/sqlite_blob_file_system.py @ Defense-Cyber-Crime-Center/dfvfs (head da2ccbc4c989ced5ad651057bd8f5a4b18af6d37) | licenses: ["Apache-2.0"] | max_stars_count: 2 | stars events: 2016-02-18T12:46:26.000Z to 2022-03-13T03:05:05.000Z
max_issues_repo: tests/vfs/sqlite_blob_file_system.py @ Defense-Cyber-Crime-Center/dfvfs (head da2ccbc4c989ced5ad651057bd8f5a4b18af6d37) | licenses: ["Apache-2.0"] | max_issues_count: null | issues events: null
max_forks_repo: tests/vfs/sqlite_blob_file_system.py @ Defense-Cyber-Crime-Center/dfvfs (head da2ccbc4c989ced5ad651057bd8f5a4b18af6d37) | licenses: ["Apache-2.0"] | max_forks_count: 5 | forks events: 2016-12-18T08:05:39.000Z to 2019-11-19T21:18:00.000Z

#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the file system implementation using sqlite blob."""
import os
import unittest
from dfvfs.path import sqlite_blob_path_spec
from dfvfs.path import os_path_spec
from dfvfs.resolver import context
from dfvfs.vfs import sqlite_blob_file_system
class SqliteBlobFileSystemTest(unittest.TestCase):
"""The unit test for the sqlite blob file system object."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
self._resolver_context = context.Context()
test_file = os.path.join(u'test_data', u'blob.db')
path_spec = os_path_spec.OSPathSpec(location=test_file)
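    # Two path specs into the same blob table: one selects the row by condition
    # (name == 'mmssms.db'), the other by row index.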
self._sqlite_blob_path_spec = sqlite_blob_path_spec.SQLiteBlobPathSpec(
table_name=u'myblobs', column_name=u'blobs',
row_condition=(u'name', u'==', u'mmssms.db'), parent=path_spec)
self._sqlite_blob_path_spec_2 = sqlite_blob_path_spec.SQLiteBlobPathSpec(
table_name=u'myblobs', column_name=u'blobs',
row_index=2, parent=path_spec)
def testOpenAndClose(self):
"""Test the open and close functionality."""
file_system = sqlite_blob_file_system.SQLiteBlobFileSystem(
self._resolver_context)
self.assertNotEqual(file_system, None)
file_system.Open(path_spec=self._sqlite_blob_path_spec)
file_system.Close()
file_system.Open(path_spec=self._sqlite_blob_path_spec_2)
file_system.Close()
def testFileEntryExistsByPathSpec(self):
"""Test the file entry exists by path specification functionality."""
file_system = sqlite_blob_file_system.SQLiteBlobFileSystem(
self._resolver_context)
self.assertNotEqual(file_system, None)
file_system.Open(path_spec=self._sqlite_blob_path_spec)
self.assertTrue(file_system.FileEntryExistsByPathSpec(
self._sqlite_blob_path_spec))
self.assertTrue(file_system.FileEntryExistsByPathSpec(
self._sqlite_blob_path_spec_2))
file_system.Close()
def testGetFileEntryByPathSpec(self):
"""Test the get entry by path specification functionality."""
file_system = sqlite_blob_file_system.SQLiteBlobFileSystem(
self._resolver_context)
self.assertNotEqual(file_system, None)
file_system.Open(path_spec=self._sqlite_blob_path_spec)
file_entry = file_system.GetFileEntryByPathSpec(self._sqlite_blob_path_spec)
self.assertNotEqual(file_entry, None)
self.assertEqual(file_entry.name, u'WHERE name == \'mmssms.db\'')
file_entry = file_system.GetFileEntryByPathSpec(
self._sqlite_blob_path_spec_2)
self.assertNotEqual(file_entry, None)
self.assertEqual(file_entry.name, u'OFFSET 2')
file_system.Close()
def testGetRootFileEntry(self):
"""Test the get root file entry functionality."""
file_system = sqlite_blob_file_system.SQLiteBlobFileSystem(
self._resolver_context)
self.assertNotEqual(file_system, None)
file_system.Open(path_spec=self._sqlite_blob_path_spec)
file_entry = file_system.GetRootFileEntry()
self.assertNotEqual(file_entry, None)
self.assertEqual(file_entry.name, u'myblobs.blobs')
file_system.Close()
if __name__ == '__main__':
unittest.main()

avg_line_length: 32.510204 | max_line_length: 80 | alphanum_fraction: 0.754237
hexsha: 9f4794f33cef602630e703d2229283ead10a1794 | size: 58,351 | ext: py | lang: Python
max_stars_repo: shellnoob.py @ hpolloni/shellnoob (head 31110c2de7e6979d9962ddcf90805f5e2866c927) | licenses: ["MIT"] | max_stars_count: null | stars events: null
max_issues_repo: shellnoob.py @ hpolloni/shellnoob (head 31110c2de7e6979d9962ddcf90805f5e2866c927) | licenses: ["MIT"] | max_issues_count: null | issues events: null
max_forks_repo: shellnoob.py @ hpolloni/shellnoob (head 31110c2de7e6979d9962ddcf90805f5e2866c927) | licenses: ["MIT"] | max_forks_count: null | forks events: null

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Yanick Fratantonio
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
__author__ = 'Yanick Fratantonio <yanick@cs.ucsb.edu>'
__description__ = 'Toolkit to write shellcodes'
__version__ = "2.0"
import os
import sys
import re
import stat
import traceback
import shutil
import socket
from tempfile import mktemp, NamedTemporaryFile
from subprocess import call, Popen, PIPE
import binascii
PY2 = sys.version_info.major == 2
try:
import IPython
except ImportError:
pass
if PY2:
import urllib2
input = raw_input
cbytes = lambda source, encoding='utf-8': bytes(source)
cstr = lambda source, encoding='utf-8': str(source)
urlread = lambda url: urllib2.urlopen(url).read()
HTTPError = urllib2.HTTPError
else:
import urllib
import urllib.request
import urllib.error
cbytes = lambda source, encoding='utf-8': bytes(source, encoding)
cstr = lambda source, encoding='utf-8': str(source, encoding)
urlread = lambda url: urllib.request.urlopen(url).read()
HTTPError = urllib.error.HTTPError
######################
### main functions ###
######################
def print_usage():
script_fn = os.path.basename(sys.argv[0])
print('%s [--from-INPUT] (input_file_path | - ) [--to-OUTPUT] [output_file_path | - ]' % script_fn, file=sys.stderr)
    print('%s -c (prepend a breakpoint. Warning: only a few platforms/OS are supported!)' % script_fn, file=sys.stderr)
print('%s --64 (64 bits mode, default: 32 bits)' % script_fn, file=sys.stderr)
print('%s --intel (intel syntax mode, default: att)' % script_fn, file=sys.stderr)
    print('%s -q (quiet mode)' % script_fn, file=sys.stderr)
print('%s -v (or -vv, -vvv)' % script_fn, file=sys.stderr)
print('%s --to-strace (compiles it & run strace)' % script_fn, file=sys.stderr)
print('%s --to-gdb (compiles it & run gdb & set breakpoint on entrypoint)' % script_fn, file=sys.stderr)
print('', file=sys.stderr)
print('Standalone "plugins"', file=sys.stderr)
print('%s -i [--to-asm | --to-opcode ] (for interactive mode)' % script_fn, file=sys.stderr)
print('%s --get-const <const>' % script_fn, file=sys.stderr)
print('%s --get-sysnum <sysnum>' % script_fn, file=sys.stderr)
print('%s --get-strerror <errno>' % script_fn, file=sys.stderr)
print('%s --file-patch <exe_fp> <file_offset> <data> (in hex). (Warning: tested only on x86/x86_64)' % script_fn, file=sys.stderr)
print('%s --vm-patch <exe_fp> <vm_address> <data> (in hex). (Warning: tested only on x86/x86_64)' % script_fn, file=sys.stderr)
print('%s --fork-nopper <exe_fp> (this nops out the calls to fork(). Warning: tested only on x86/x86_64)' % script_fn, file=sys.stderr)
print('', file=sys.stderr)
print('"Installation"', file=sys.stderr)
    print('%s --install [--force] (this just copies the script in a convenient position)' % script_fn, file=sys.stderr)
print('%s --uninstall [--force]' % script_fn, file=sys.stderr)
print('', file=sys.stderr)
print('Supported INPUT format: %s' % ', '.join(ShellNoob.INPUT_FMT), file=sys.stderr)
print('Supported OUTPUT format: %s' % ', '.join(ShellNoob.OUTPUT_FMT), file=sys.stderr)
print('All combinations from INPUT to OUTPUT are supported!', file=sys.stderr)
print('', file=sys.stderr)
print('Check out the README file for more info.', file=sys.stderr)
class ShellNoob():
INPUT_FMT = ['asm', 'obj', 'bin', 'hex', 'c', 'shellstorm']
OUTPUT_FMT = ['asm', 'obj', 'exe', 'bin', 'hex', 'c', 'completec',
'python', 'bash', 'ruby', 'pretty', 'safeasm']
# {kernel#hardware#flag_64_bit#flag_intel}
objdump_options_map = {
'Linux#i[2-6]?86#32#att' : '',
'Linux#i[2-6]?86#32#intel' : '-m i386:intel',
'Linux#x86_64#32#att' : '',
'Linux#x86_64#32#intel' : '-m i386:intel',
'Linux#x86_64#64#att' : '',
'Linux#x86_64#64#intel' : '-m i386:x86-64:intel',
'Linux#arm.*#32#.*' : '',
'FreeBSD#i[2-6]?86#32#.*' : '',
'FreeBSD#amd64#32#att' : ''
}
# {kernel-hardware-flag_64_bit-flag_intel}
as_options_map = {
'Linux#i[2-6]?86#32#att' : '',
'Linux#i[2-6]?86#32#intel' : '-msyntax=intel -mnaked-reg',
'Linux#x86_64#32#att' : '--32',
'Linux#x86_64#32#intel' : '--32 -msyntax=intel -mnaked-reg',
'Linux#x86_64#64#att' : '',
'Linux#x86_64#64#intel' : '-msyntax=intel -mnaked-reg',
'Linux#arm.*#32#.*' : '',
'FreeBSD#i[2-6]?86#32#.*' : '',
'FreeBSD#amd64#32#att' : ''
}
# {kernel-hardware-flag_64_bit-flag_intel}
ld_options_map = {
'Linux#i[2-6]?86#32#.*' : '',
'Linux#x86_64#32#.*' : '-m elf_i386',
'Linux#x86_64#64#.*' : '',
'Linux#arm.*#32#.*' : '',
'FreeBSD#i[2-6]?86#32#.*' : '-m elf_i386_fbsd',
'FreeBSD#amd64#32#att' : '-m elf_i386_fbsd'
}
# {kernel-hardware-flag_64_bit-flag_intel}
gcc_options_map = {
'Linux#i[2-6]?86#32#.*' : '',
'Linux#x86_64#32#.*' : '-m32',
'Linux#x86_64#64#.*' : '',
'Linux#arm.*#32#.*' : '',
'FreeBSD#i[2-6]?86#32#.*' : '-m elf_i386_fbsd'
}
# {kernel-hardware}
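    # 'cc' is the one-byte x86/x86_64 int3 (software breakpoint) opcode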
breakpoint_hex_map = {
'.*#i[2-6]?86' : 'cc',
'.*#x86_64' : 'cc'
}
# {kernel-hardware}
comment_as_char = {
'.*#i[2-6]?86' : '#',
'.*#x86_64' : '#',
'.*#arm.*' : '@',
}
# [hardware]
hw_with_align = ['arm.*']
shellcode_t = ('.section .text\n'
'%s\n'
)
completec_t = (
'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <errno.h>
#include <malloc.h>
%s
int len = (sizeof(shellcode) > 2048) ?sizeof(shellcode):2048;
int main() {
// make sure the memory is RWX to support self-modifying shellcode
char *target = (char *) memalign(4096, len);
mprotect(target, len, PROT_READ | PROT_WRITE | PROT_EXEC);
memcpy(target, shellcode, len);
(*(void (*)()) target)();
return 0;
}
'''
)
shellstorm_t = 'http://www.shell-storm.org/shellcode/files/shellcode-%s.php'
def __init__(self, flag_64_bit=False, flag_intel=False, with_breakpoint=False, verbose=0, keep_files=False):
self.shellnoob_fp = os.path.abspath(__file__)
self.flag_64_bit = '64' if flag_64_bit else '32'
self.flag_intel = 'intel' if flag_intel else 'att'
self.with_breakpoint = with_breakpoint
self.verbose = verbose
self.debug = True if self.verbose >= 4 else False
self.keep_files = keep_files
self.kernel = self.get_kernel()
self.hardware = self.get_hardware()
self.set_conv_functions()
self.check_compatibility()
def set_conv_functions(self):
for i in self.INPUT_FMT:
for o in self.OUTPUT_FMT:
func_name = '%s_to_%s' % (i, o)
if self.debug: print('Creating %s' % func_name, file=sys.stderr)
if i == o: continue
if func_name not in ShellNoob.__dict__:
# conversion not implemented: let's go through hex
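                    # e.g. bin_to_exe is synthesized as bin_to_hex followed by hex_to_exe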
setattr(ShellNoob, func_name, self.gen_conv_function(i, o))
def gen_conv_function(self, input_fmt, output_fmt):
# generate on-the-fly a conversion function going through the "hex" format
to_hex_func_name = '%s_to_hex' % input_fmt
from_hex_func_name = 'hex_to_%s' % output_fmt
to_hex = ShellNoob.__dict__[to_hex_func_name]
from_hex = ShellNoob.__dict__[from_hex_func_name]
def conv(self, input_s, with_breakpoint=False):
_hex = to_hex(self, input_s, with_breakpoint)
_output = from_hex(self, _hex)
return _output
return conv
def check_compatibility(self):
try:
self.get_objdump_options()
except ShellNoobException as e:
print('ERROR: %s' % e.message, file=sys.stderr)
sys.exit(2)
try:
self.get_as_options()
except ShellNoobException as e:
print('ERROR: %s' % e.message, file=sys.stderr)
sys.exit(2)
try:
self.get_ld_options()
except ShellNoobException as e:
print('ERROR: %s' % e.message, file=sys.stderr)
sys.exit(2)
if self.with_breakpoint:
try:
self.get_breakpoint_hex()
except ShellNoobException as e:
print('ERROR: %s' % e.message, file=sys.stderr)
sys.exit(2)
def get_objdump_options(self, kernel=None, hardware=None, flag_64_bit=None, flag_intel=None):
# use the passed settings, if specified
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
flag_64_bit = flag_64_bit if flag_64_bit is not None else self.flag_64_bit
flag_intel = flag_intel if flag_intel is not None else self.flag_intel
for entry, options in self.objdump_options_map.items():
e_kernel, e_hardware, e_64, e_intel = entry.split('#')
if not re.search(e_kernel, kernel): continue
if not re.search(e_hardware, hardware): continue
if not re.search(e_64, flag_64_bit): continue
if not re.search(e_intel, flag_intel): continue
if self.debug: print('MATCH with %s ~> %s' % (entry, options), file=sys.stderr)
return options
raise ShellNoobException('objdump_options not found for the current setup')
def get_as_options(self, kernel=None, hardware=None, flag_64_bit=None, flag_intel=None):
# use the passed settings, if specified
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
flag_64_bit = flag_64_bit if flag_64_bit is not None else self.flag_64_bit
flag_intel = flag_intel if flag_intel is not None else self.flag_intel
for entry, options in self.as_options_map.items():
e_kernel, e_hardware, e_64, e_intel = entry.split('#')
if not re.search(e_kernel, kernel): continue
if not re.search(e_hardware, hardware): continue
if not re.search(e_64, flag_64_bit): continue
if not re.search(e_intel, flag_intel): continue
if self.debug: print('MATCH with %s ~> %s' % (entry, options), file=sys.stderr)
return options
raise ShellNoobException('as_options not found for the current setup')
def get_ld_options(self, kernel=None, hardware=None, flag_64_bit=None, flag_intel=None):
# use the passed settings, if specified
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
flag_64_bit = flag_64_bit if flag_64_bit is not None else self.flag_64_bit
flag_intel = flag_intel if flag_intel is not None else self.flag_intel
for entry, options in self.ld_options_map.items():
e_kernel, e_hardware, e_64, e_intel = entry.split('#')
if not re.search(e_kernel, kernel): continue
if not re.search(e_hardware, hardware): continue
if not re.search(e_64, flag_64_bit): continue
if not re.search(e_intel, flag_intel): continue
if self.debug: print('MATCH with %s ~> %s' % (entry, options), file=sys.stderr)
return options
raise ShellNoobException('ld_options not found for the current setup')
def get_gcc_options(self, kernel=None, hardware=None, flag_64_bit=None, flag_intel=None):
# use the passed settings, if specified
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
flag_64_bit = flag_64_bit if flag_64_bit is not None else self.flag_64_bit
flag_intel = flag_intel if flag_intel is not None else self.flag_intel
for entry, options in self.gcc_options_map.items():
e_kernel, e_hardware, e_64, e_intel = entry.split('#')
if not re.search(e_kernel, kernel): continue
if not re.search(e_hardware, hardware): continue
if not re.search(e_64, flag_64_bit): continue
if not re.search(e_intel, flag_intel): continue
if self.debug: print('MATCH with %s ~> %s' % (entry, options), file=sys.stderr)
return options
raise ShellNoobException('gcc_options not found for the current setup')
def get_breakpoint_hex(self, kernel=None, hardware=None):
# use the passed settings, if specified
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
for entry, _hex in self.breakpoint_hex_map.items():
e_kernel, e_hardware = entry.split('#')
if not re.search(e_kernel, kernel): continue
if not re.search(e_hardware, hardware): continue
if self.debug: print('MATCH with %s-%s ~> %s' % (e_kernel, e_hardware, _hex), file=sys.stderr)
return _hex
raise ShellNoobException('the breakpoint feature is not supported in the current configuration')
def get_comment_as_char(self, kernel=None, hardware=None):
# use the passed settings, if specified
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
for entry, comment_char in self.comment_as_char.items():
e_kernel, e_hardware = entry.split('#')
if not re.search(e_kernel, kernel): continue
if not re.search(e_hardware, hardware): continue
if self.debug: print('MATCH with %s ~> %s' % (entry, comment_char), file=sys.stderr)
return comment_char
######################
# standalone plugins #
######################
def do_resolve_syscall(self, syscall, kernel=None, hardware=None):
global cstr
kernel = kernel if kernel is not None else self.kernel
hardware = hardware if hardware is not None else self.hardware
if (kernel, hardware) == ('Linux', 'x86_64'):
platforms = {'i386' : ['asm/unistd_32.h'],
'x86_64' : ['asm/unistd_64.h']
}
symbol = '__NR_%s' % syscall
else:
platforms = {'i386' : ['sys/syscall.h']}
symbol = 'SYS_%s' % syscall
body = 'printf("%%d", %s); return 0;' % (symbol)
for platform, includes in reversed(sorted(platforms.items())):
try:
tmp_exe_fp = self.include_and_body_to_exe_fp(includes, body)
except ShellNoobException:
print('ERROR: syscall %s not found for platform %s' % (syscall, platform), file=sys.stderr)
continue
p = Popen(tmp_exe_fp, stdout=PIPE)
output, error = p.communicate()
retval = p.returncode
if retval == 0:
print('%s ~> %s' % (platform, cstr(output, "utf-8")))
else:
                print('ERROR: retval %s while resolving syscall %s' % (retval, syscall), file=sys.stderr)
if not self.keep_files:
os.unlink(tmp_exe_fp)
def do_resolve_const(self, const):
includes = ['sys/types.h',
'sys/stat.h',
'sys/mman.h',
'fcntl.h',
]
body = 'printf("%%d", %s); return 0;' % (const)
try:
tmp_exe_fp = self.include_and_body_to_exe_fp(includes, body)
except ShellNoobException:
print('ERROR: constant %s not found' % const, file=sys.stderr)
return
p = Popen(tmp_exe_fp, stdout=PIPE)
output, error = p.communicate()
retval = p.returncode
if retval == 0:
print('%s ~> %s' % (const, int(output)))
else:
            print('ERROR: retval %s while resolving const %s' % (retval, const), file=sys.stderr)
if not self.keep_files:
os.unlink(tmp_exe_fp)
def do_resolve_errno(self, errno):
global cstr
includes = ['string.h']
body = 'printf("%%s", strerror(%s)); return 0;' % (errno)
try:
tmp_exe_fp = self.include_and_body_to_exe_fp(includes, body)
except ShellNoobException:
            print('ERROR: errno %s not found' % errno, file=sys.stderr)
return
p = Popen(tmp_exe_fp, stdout=PIPE)
output, error = p.communicate()
retval = p.returncode
if retval == 0:
print('%s ~> %s' % (errno, cstr(output, "utf-8")))
else:
            print('ERROR: retval %s while resolving errno %s' % (retval, errno), file=sys.stderr)
if not self.keep_files:
os.unlink(tmp_exe_fp)
def do_interactive_mode(self, args):
global cbytes
asm_to_opcode_flag = None
if '--to-opcode' in args:
asm_to_opcode_flag = True
elif '--to-asm' in args:
asm_to_opcode_flag = False
if asm_to_opcode_flag is None:
print('asm_to_opcode (1) or opcode_to_asm (2)?: ', end='')
answer = input()
while answer != '1' and answer != '2':
print('seriously? dude, choose between 1 and 2: ', end='')
answer = input()
asm_to_opcode_flag = True if answer == '1' else False
assert asm_to_opcode_flag is not None
if asm_to_opcode_flag:
print('asm_to_opcode selected (type "quit" or ^C to end)')
ins = ''
quit = False
while not quit:
while not ins:
print('>> ', end='')
ins = input().strip(' \t\n')
if ins.lower() == 'quit':
quit = True
if quit: continue
try:
_hex = self.ins_to_hex(ins)
print('%s ~> %s' % (ins, _hex))
except Exception as e:
print('ERROR: %s' % e, file=sys.stderr)
if self.verbose >= 3:
print(traceback.format_exc(), file=sys.stderr)
print('--------------------------', file=sys.stderr)
ins = ''
else:
print('opcode_to_asm selected (type "quit" or ^C to end)')
_hex = ''
quit = False
while not quit:
while not _hex:
print('>> ', end='')
_hex = input().strip(' \t\n')
if _hex.lower() == 'quit':
quit = True
if quit: continue
try:
_hex = _hex.replace(' ','').strip(' \t\n')
asm = self.hex_to_pretty(_hex)
print('%s ~> %s' % (cbytes(_hex), asm))
except Exception as e:
print('ERROR: %s' % e, file=sys.stderr)
if self.verbose >= 3:
print(traceback.format_exc(), file=sys.stderr)
print('--------------------------', file=sys.stderr)
_hex = ''
def do_conversion(self, input_fp, output_fp, input_fmt, output_fmt):
global cbytes
if self.verbose >= 0:
if input_fmt == '-':
msg = 'Converting from stdin (%s) ' % input_fmt
else:
msg = 'Converting %s (%s) ' % (input_fp, input_fmt)
if output_fp == '-':
msg += 'to stdout (%s)' % output_fmt
else:
msg += 'into %s (%s)' % (output_fp, output_fmt)
print(msg, file=sys.stderr)
# reading the input
if input_fp == '-':
_input = sys.stdin.read()
else:
if input_fmt == 'shellstorm':
_input = input_fp # shellcode id
else:
_input = open(input_fp, 'rb').read()
conv_func_name = '%s_to_%s' % (input_fmt, output_fmt)
try:
_output = getattr(self, conv_func_name)(_input)
except AttributeError as err:
print('ERROR: conversion mode "%s" is not supported.' % conv_func_name, file=sys.stderr)
if self.verbose >= 3: print(traceback.format_exc(), end='')
sys.exit(2)
except ShellNoobException as err:
print('%s' % err, file=sys.stderr)
if self.verbose >= 3: print(traceback.format_exc(), end='')
sys.exit(2)
if not isinstance(_output, bytes):
_output = cbytes(_output)
# writing the output
if output_fp == '-':
sys.stdout.write(_output.decode(sys.stdout.encoding))
else:
open(output_fp, 'wb').write(_output)
if output_fmt == 'exe' and output_fp != '-':
# chmod 700
os.chmod(output_fp, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
def do_strace(self, input_fp, input_fmt):
if self.verbose >= 3: print('IN do_strace', file=sys.stderr)
exe_fp = mktemp()
self.do_conversion(input_fp, exe_fp, input_fmt, 'exe')
p = Popen('strace %s' % exe_fp, shell=True)
p.wait()
if not self.keep_files:
os.unlink(exe_fp)
if self.verbose >= 3: print('OUT do_strace', file=sys.stderr)
def do_gdb(self, input_fp, input_fmt):
if self.verbose >= 3: print('IN do_gdb', file=sys.stderr)
exe_fp = mktemp()
self.do_conversion(input_fp, exe_fp, input_fmt, 'exe')
start_addr = None
try:
start_addr = self.get_start_address(exe_fp)
except:
print('WARNING: failed to get the start address :-(', file=sys.stderr)
print(traceback.format_exc(), file=sys.stderr)
print('------------------------', file=sys.stderr)
if start_addr:
cmd = 'gdb -ex "break *%s" -q %s' % (start_addr, exe_fp)
else:
cmd = 'gdb -q %s' % exe_fp
p = Popen(cmd, shell=True)
p.wait()
if not self.keep_files:
os.unlink(exe_fp)
if self.verbose >= 3: print('OUT do_gdb', file=sys.stderr)
#############################
# executable patching utils #
#############################
def get_bits(self, exe_fp):
bits = None
if '32-bit' in os.popen('file %s' % exe_fp).read():
bits = 32
elif '64-bit' in os.popen('file %s' % exe_fp).read():
bits = 64
assert bits is not None
return bits
def get_text_section_info(self, exe_fp):
bits = self.get_bits(exe_fp)
vm_address, file_offset, size = None, None, None
lines = os.popen('readelf -S %s' % exe_fp).read().split('\n')
if bits == 32:
for line in lines:
m = re.search('.text\s+\w+\s+([0-9a-f]+)\s+([0-9a-f]+)\s+([0-9a-f]+)', line)
if not m: continue
vm_address = int(m.group(1), 16)
file_offset = int(m.group(2), 16)
size = int(m.group(3), 16)
break
elif bits == 64:
for line in lines:
if vm_address is None and file_offset is None:
m = re.search('.text\s+\w+\s+([0-9a-f]+)\s+([0-9a-f]+)', line)
if not m: continue
vm_address = int(m.group(1), 16)
file_offset = int(m.group(2), 16)
continue
else:
m = re.search('\s+([0-9a-f]+)\s+[0-9a-f]+', line)
if not m: raise Exception('error while parsing readelf -S (64bit)')
size = int(m.group(1), 16)
break
else:
raise Exception('weird number of bits')
assert vm_address is not None and file_offset is not None and size is not None
return vm_address, file_offset, size
def get_file_offset_from_vm_address(self, exe_fp, vm_address):
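        # translate a virtual address to a file offset via the .text section mapping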
start_vm, start_file, size = self.get_text_section_info(exe_fp)
assert start_vm <= vm_address <= start_vm + size
return vm_address - start_vm + start_file
def do_fork_nopper(self, exe_fp):
lines = os.popen('objdump -d %s' % exe_fp).read().split('\n')
for line in lines:
if self.verbose >= 1:
print(line)
m = re.search('([0-9a-f]+):\s+[0-9a-f ]+\s+call.*fork', line)
if not m: continue
vm_address = int(m.group(1), 16)
file_offset = self.get_file_offset_from_vm_address(exe_fp, vm_address)
print('Found call to fork @ 0x%x (file offset 0x%x)' % (vm_address, file_offset))
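            # patch bytes: three NOPs (0x90) plus xor eax,eax (0x31 0xc0), so the 5-byte
            # call site is overwritten and the code sees a "return value" of 0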
self.do_exe_patch(exe_fp, b'\x90\x90\x90\x31\xc0', file_offset)
def do_exe_patch(self, exe_fp, data, file_offset=None, vm_address=None, replace=True):
if not replace:
raise Exception('unsupported')
if file_offset is None and vm_address is None:
raise Exception('you need to specify at least one of the two ;)')
if file_offset is None:
file_offset = self.get_file_offset_from_vm_address(exe_fp, vm_address)
f = open(exe_fp, 'rb+')
f.seek(file_offset)
f.write(data)
f.close()
###################
### conversions ###
###################
def asm_to_hex(self, asm, with_breakpoint=None):
global cstr
if self.verbose >= 3: print('IN asm_to_hex', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
obj = self.asm_to_obj(asm, with_breakpoint)
_hex = self.obj_to_hex(obj, with_breakpoint=False)
if self.verbose >= 3: print('OUT asm_to_hex', file=sys.stderr)
return cstr(_hex)
def bin_to_hex(self, _bin, with_breakpoint=None):
global cbytes
if self.verbose >= 3: print('IN bin_to_hex', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
if self.verbose >= 3: print('OUT bin_to_hex', file=sys.stderr)
return cbytes(prepend) + binascii.hexlify(_bin)
def obj_to_hex(self, obj, with_breakpoint=None):
if self.verbose >= 3: print('IN obj_to_hex', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
tmp_obj_f = NamedTemporaryFile(delete=False)
tmp_obj_fp = tmp_obj_f.name
tmp_obj_f.write(obj)
tmp_obj_f.close()
tmp_bin_fp = mktemp()
cmd = 'objcopy -O binary %s %s' % (tmp_obj_fp, tmp_bin_fp)
retval = self.exec_cmd(cmd, 'obj_to_hex')
try:
assert retval == 0
except:
raise Exception('Error while converting from obj_to_hex. Not valid ELF?')
_bin = open(tmp_bin_fp, 'rb').read()
_hex = self.bin_to_hex(_bin, with_breakpoint)
if not self.keep_files:
os.unlink(tmp_obj_fp)
os.unlink(tmp_bin_fp)
if self.verbose >= 3: print('OUT obj_to_hex', file=sys.stderr)
return _hex
def c_to_hex(self, c, with_breakpoint=None):
if self.verbose >= 3: print('IN c_to_hex', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
        print('WARNING: c_to_hex just extracts the \\xXX looking parts. Check that everything is fine!', file=sys.stderr)
def get_next_hex(buf):
slash_x_idx = buf.find('\\x')
if slash_x_idx == -1:
return '', ''
return buf[slash_x_idx+2:slash_x_idx+4], buf[slash_x_idx+4:]
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
_hex = ''
_next = c
while _next:
hex_byte, _next = get_next_hex(_next)
_hex += hex_byte
_hex = prepend + _hex
if self.verbose >= 3: print('OUT c_to_hex', file=sys.stderr)
return _hex
def shellstorm_to_hex(self, shellstorm_id, with_breakpoint=None):
global cstr, urlread
if self.verbose >= 3: print('IN shellstorm_to_hex', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
        print('WARNING: shellstorm_to_hex just extracts the \\xXX looking parts. Check that everything is fine!', file=sys.stderr)
shellstorm_url = self.shellstorm_t % shellstorm_id
try:
content = cstr(urlread(shellstorm_url))
except HTTPError as err:
raise ShellNoobException('ERROR: failed fetching shellcode from %s (%s)' % (shellstorm_url, err))
# prefilter some html stuff
after_pre_idx = content.find('<pre>') + len('<pre>')
before_body_idx = content.find('<body>')
content = content[after_pre_idx:before_body_idx]
_hex = self.c_to_hex(content, with_breakpoint)
if self.verbose >= 3: print('OUT shellstorm_to_hex', file=sys.stderr)
return _hex
################
### hex_to_* ###
################
def hex_to_asm(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_asm', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
obj = self.hex_to_obj(_hex, with_breakpoint)
asm = self.obj_to_asm(obj, with_breakpoint=False)
if self.verbose >= 3: print('OUT hex_to_asm', file=sys.stderr)
return asm
def hex_to_obj(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_obj', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if not isinstance(_hex, str):
_hex = cstr(_hex)
if len(_hex) != 0 and _hex.endswith('\n'):
_hex = _hex.rstrip('\n')
print('Warning: stripped a \'\\n\' at the end of the hex', file=sys.stderr)
if len(_hex) == 0 or len(_hex) % 2 != 0:
raise Exception('Not valid _hex: %s' % _hex)
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
_hex = prepend + _hex
asm = self.hex_to_asm_bytes(_hex)
obj = self.asm_to_obj(asm, with_breakpoint=False)
if self.verbose >= 3: print('OUT hex_to_obj', file=sys.stderr)
return obj
def hex_to_exe(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_exe', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
completec = self.hex_to_completec(_hex, with_breakpoint)
exe = self.c_to_exe(completec, with_breakpoint=False)
if self.verbose >= 3: print('OUT hex_to_exe', file=sys.stderr)
return exe
def hex_to_bin(self, _hex, with_breakpoint=None):
global cstr
if self.verbose >= 3: print('IN hex_to_bin', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if not isinstance(_hex, str):
_hex = cstr(_hex)
if len(_hex) != 0 and _hex.endswith('\n'):
_hex = _hex.rstrip('\n')
print('Warning: stripped a \'\\n\' at the end of the hex', file=sys.stderr)
if len(_hex) == 0 or len(_hex) % 2 != 0:
raise Exception('Not valid _hex: %s' % _hex)
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
_hex = prepend + _hex
if self.verbose >= 3: print('OUT hex_to_bin', file=sys.stderr)
return binascii.unhexlify(_hex)
def hex_to_c(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_c', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if not isinstance(_hex, str):
_hex = cstr(_hex)
if len(_hex) != 0 and _hex.endswith('\n'):
_hex = _hex.rstrip('\n')
print('Warning: stripped a \'\\n\' at the end of the hex', file=sys.stderr)
if len(_hex) == 0 or len(_hex) % 2 != 0:
raise Exception('Not valid _hex: %s' % _hex)
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
_hex = prepend + _hex
template = 'char shellcode[] = "%s";'
content = ''
for idx in range(0, len(_hex), 2):
content += '\\x%s' % _hex[idx:idx+2]
out = template % content
if self.verbose >= 3: print('OUT hex_to_c', file=sys.stderr)
return out
def hex_to_python(self, _hex, with_breakpoint=None):
global cstr
if self.verbose >= 3: print('IN hex_to_python', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if not isinstance(_hex, str):
_hex = cstr(_hex)
if len(_hex) != 0 and _hex.endswith('\n'):
_hex = _hex.rstrip('\n')
print('Warning: stripped a \'\\n\' at the end of the hex', file=sys.stderr)
if len(_hex) == 0 or len(_hex) % 2 != 0:
raise Exception('Not valid _hex: %s' % _hex)
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
_hex = prepend + _hex
template = '%s'
content = ''
for idx in range(0, len(_hex), 2):
content += '\\x%s' % _hex[idx:idx+2]
out = template % content
if self.verbose >= 3: print('OUT hex_to_python', file=sys.stderr)
return out
def hex_to_bash(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_bash', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
out = self.hex_to_python(_hex, with_breakpoint)
if self.verbose >= 3: print('OUT hex_to_bash', file=sys.stderr)
return out
class AreYouFuckingKiddingMeException(Exception):
pass
def hex_to_ruby(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_ruby', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
# I'm not a ruby fan, and I really needed to put an easter egg :-)
raise AreYouFuckingKiddingMeException()
def hex_to_pretty(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_pretty', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
obj = self.hex_to_obj(_hex, with_breakpoint)
exe = self.obj_to_pretty(obj, with_breakpoint=False)
if self.verbose >= 3: print('OUT hex_to_pretty', file=sys.stderr)
return exe
def obj_to_pretty(self, obj, with_breakpoint=None):
if self.verbose >= 3: print('IN obj_to_pretty', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if with_breakpoint:
raise Exception('the with_breakpoint option is NOT supported in obj_to_exe')
if self.need_to_align():
_hex = self.obj_to_hex(obj)
            if self.debug: print('hex length: ', len(_hex), file=sys.stderr)
            aligned_hex = self.align_hex(_hex)
            if self.debug: print('aligned hex length: ', len(aligned_hex), file=sys.stderr)
if _hex != aligned_hex:
obj = self.hex_to_obj(aligned_hex, with_breakpoint=False)
tmp_obj_f = NamedTemporaryFile(delete=False)
tmp_obj_fp = tmp_obj_f.name
tmp_obj_f.write(obj)
tmp_obj_f.close()
tmp_pretty_fp = mktemp()
objdump_options = self.get_objdump_options()
cmd = 'objdump -d %s %s > %s' % (objdump_options,
tmp_obj_fp,
tmp_pretty_fp
)
self.exec_cmd(cmd, caller='obj_to_pretty')
pretty = open(tmp_pretty_fp).read()
started = False
lines = []
for line in pretty.split('\n'):
if not started and 'Disassembly of section .text:' in line:
started = True
if not started: continue
lines.append(line)
pretty = '\n'.join(lines)
if not self.keep_files:
os.unlink(tmp_obj_fp)
os.unlink(tmp_pretty_fp)
if self.verbose >= 3: print('OUT obj_to_pretty', file=sys.stderr)
return pretty
#########################
### additional blocks ###
#########################
def asm_to_obj(self, asm, with_breakpoint=None):
global cstr
if self.verbose >= 3: print('IN asm_to_obj', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if isinstance(asm, bytes):
asm = cstr(asm)
prepend = self.hex_to_asm_bytes(self.get_breakpoint_hex()) if with_breakpoint else ''
asm = prepend + asm + '\n'
tmp_asm_f = NamedTemporaryFile(delete=False)
tmp_asm_fp = tmp_asm_f.name
tmp_asm_f.write(asm.encode("utf-8"))
tmp_asm_f.close()
tmp_obj_fp = mktemp()
as_options = self.get_as_options()
cmd = 'as %s -o %s %s' % (as_options, tmp_obj_fp, tmp_asm_fp)
self.exec_cmd(cmd, caller='asm_to_obj')
if not os.path.isfile(tmp_obj_fp):
raise Exception("not valid shellcode (asm_to_obj)")
# delete all the symbols
cmd = 'strip %s' % tmp_obj_fp
self.exec_cmd(cmd, caller='asm_to_obj')
obj = open(tmp_obj_fp, 'rb').read()
if not self.keep_files:
os.unlink(tmp_asm_fp)
os.unlink(tmp_obj_fp)
if self.verbose >= 3: print('OUT asm_to_obj', file=sys.stderr)
return obj
def obj_to_asm(self, obj, with_breakpoint=None):
if self.verbose >= 3: print('IN obj_to_asm', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if self.need_to_align():
_hex = self.obj_to_hex(obj)
aligned_hex = self.align_hex(_hex)
if _hex != aligned_hex:
obj = self.hex_to_obj(aligned_hex, with_breakpoint=False)
tmp_obj_f = NamedTemporaryFile(delete=False)
tmp_obj_fp = tmp_obj_f.name
tmp_obj_f.write(obj)
tmp_obj_f.close()
objdump_options = self.get_objdump_options()
cmd = 'objdump -d %s %s | tr -s " "' % (objdump_options,
tmp_obj_fp,
)
if self.verbose >= 2: print('(obj_to_asm) Executing: %s' % cmd, file=sys.stderr)
obj_out = os.popen(cmd).read()
lines = obj_out.split('\n')
started = False
        prepend = self.hex_to_asm_bytes(self.get_breakpoint_hex()) if with_breakpoint else ''
out_lines = []
max_asm_len, max_help_asm_len = 0, 0
for line in lines:
if not started and 'Disassembly of section .text:' in line:
started = True
continue
if not started: continue
comment_char = self.get_comment_as_char()
# asm started
m = re.search('[0-9a-f]+:\s+([0-9a-f ]+)\t(.*)$', line)
if not m:
continue
_hex = m.group(1).replace(' ', '').strip(' \t\n')
help_asm = self.hex_to_asm_bytes(_hex).rstrip('\n')
try:
_ascii = '.ascii "%s"' % _hex
_ascii = _ascii.strip(' \t\n')
except UnicodeDecodeError:
_ascii = ''
asm = m.group(2).strip(' \t\n')
sc_idx = asm.find(';')
if sc_idx != -1:
asm = asm[:sc_idx]
if len(asm) > max_asm_len:
max_asm_len = len(asm)
if len(help_asm) > max_help_asm_len:
max_help_asm_len = len(help_asm)
out_line = (asm, help_asm, _ascii)
out_lines.append(out_line)
out = prepend
out_fmt = ' {:<%d}\t{:} {:<%d} {:} {:}\n' % (max_asm_len, max_help_asm_len)
for (asm, help_asm, _ascii) in out_lines:
out += out_fmt.format(asm, comment_char, help_asm, comment_char, _ascii)
if not self.keep_files:
os.unlink(tmp_obj_fp)
shellcode = self.shellcode_t % out
if self.verbose >= 3: print('OUT obj_to_asm', file=sys.stderr)
return shellcode
def asm_to_exe(self, asm, with_breakpoint=None):
if self.verbose >= 3: print('IN asm_to_exe', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
_hex = self.asm_to_hex(asm, with_breakpoint)
exe = self.hex_to_exe(_hex, with_breakpoint=False)
if self.verbose >= 3: print('OUT asm_to_exe', file=sys.stderr)
return exe
def obj_to_exe(self, obj, with_breakpoint=None):
if self.verbose >= 3: print('IN obj_to_exe', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if with_breakpoint:
raise Exception('the with_breakpoint option is NOT supported in obj_to_exe')
tmp_obj_f = NamedTemporaryFile(delete=False)
tmp_obj_fp = tmp_obj_f.name
tmp_obj_f.write(obj)
tmp_obj_f.close()
tmp_exe_fp = mktemp()
ld_options = self.get_ld_options()
# note: ld -V to list all the emulations
cmd = 'ld -N %s %s -o %s' % (ld_options, tmp_obj_fp, tmp_exe_fp)
retval = self.exec_cmd(cmd, True, caller='obj_to_exe')
exe = open(tmp_exe_fp, 'rb').read()
if not self.keep_files:
os.unlink(tmp_obj_fp)
os.unlink(tmp_exe_fp)
if self.verbose >= 3: print('OUT obj_to_exe', file=sys.stderr)
return exe
def hex_to_safeasm(self, _hex, with_breakpoint=None):
global cstr
if self.verbose >= 3: print('IN hex_to_safeasm', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
if not isinstance(_hex, str):
_hex = cstr(_hex)
if len(_hex) != 0 and _hex.endswith('\n'):
_hex = _hex.rstrip('\n')
print('Warning: stripped a \'\\n\' at the end of the hex', file=sys.stderr)
if len(_hex) == 0 or len(_hex) % 2 != 0:
raise Exception('Not valid _hex: %s' % _hex)
prepend = self.get_breakpoint_hex() if with_breakpoint else ''
_hex = prepend + _hex
asm = self.hex_to_asm_bytes(_hex)
shellcode = self.shellcode_t % asm
if self.verbose >= 3: print('OUT hex_to_safeasm', file=sys.stderr)
return shellcode
def hex_to_completec(self, _hex, with_breakpoint=None):
if self.verbose >= 3: print('IN hex_to_completec', file=sys.stderr)
with_breakpoint = with_breakpoint if with_breakpoint is not None else self.with_breakpoint
c = self.hex_to_c(_hex, with_breakpoint)
completec = self.completec_t % c
if self.verbose >= 3: print('OUT hex_to_completec', file=sys.stderr)
return completec
def c_to_exe(self, c, with_breakpoint=None):
global cbytes
# NOTE assumption: the input is "compileable C"
if self.verbose >= 3: print('IN c_to_exe', file=sys.stderr)
if with_breakpoint:
raise Exception('the with_breakpoint option is NOT supported in c_to_exe')
if not isinstance(c, bytes):
c = cbytes(c)
tmp_c_f = NamedTemporaryFile(suffix='.c', delete=False)
tmp_c_fp = tmp_c_f.name
tmp_c_f.write(c)
tmp_c_f.close()
tmp_exe_fp = mktemp()
gcc_options = self.get_gcc_options()
cmd = 'gcc %s -o %s %s' % (gcc_options, tmp_exe_fp, tmp_c_fp)
retval = self.exec_cmd(cmd, True, caller='c_to_exe')
exe = open(tmp_exe_fp, 'rb').read()
if not self.keep_files:
os.unlink(tmp_c_fp)
os.unlink(tmp_exe_fp)
if self.verbose >= 3: print('OUT c_to_exe', file=sys.stderr)
return exe
########################
# additional functions #
########################
def ins_to_hex(self, ins):
asm = self.inss_to_asm([ins])
_hex = self.asm_to_hex(asm)
return _hex
def hex_to_inss(self, _hex):
asm = self.hex_to_asm(_hex)
inss = asm.split('\n')[1:]
inss = filter(lambda i:i.strip(' \t'), inss)
inss = map(lambda i:i.split('#')[0], inss)
inss = map(lambda i:i.strip(' \t'), inss)
return list(inss)
def inss_to_asm(self, inss):
out = '\n'.join(inss)
shellcode = self.shellcode_t % out
return shellcode
def asm_to_inss(self, asm):
inss = []
for i in asm.split('\n'):
i = i.strip(' \t\n')
if not i: continue
inss.append(i)
return inss
###########
# helpers #
###########
def hex_to_asm_bytes(self, _hex):
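        # e.g. '31c0' becomes '.byte 0x31,0xc0\n'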
hex_list = ['0x%s' % _hex[i:i+2] for i in range(0, len(_hex), 2)]
asm = '.byte ' + ','.join(hex_list) + '\n'
return asm
def include_and_body_to_exe_fp(self, includes, body):
global cbytes
std_includes = set(('stdio.h', 'stdlib.h', 'errno.h'))
includes = set(includes)
includes.update(std_includes)
c_prog = ''
for inc in includes:
c_prog += '#include<%s>\n' % inc
c_prog += 'int main() {\n'
c_prog += body
c_prog += '}\n'
tmp_c_fp = mktemp() + '.c'
tmp_exe_fp = mktemp()
with open(tmp_c_fp, 'wb') as f:
f.write(cbytes(c_prog))
cmd = 'gcc %s -o %s' % (tmp_c_fp, tmp_exe_fp)
retval = self.exec_cmd(cmd, 'include_and_body_to_exe_fp')
if retval != 0:
output = ''
raise ShellNoobException()
if not self.keep_files:
os.unlink(tmp_c_fp)
return tmp_exe_fp
def get_start_address(self, exe_fp):
cmd = 'objdump -f %s' % exe_fp
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
_out, _err = p.communicate()
assert p.returncode == 0
_out = cstr(_out)
for line in _out.split('\n'):
line = line.strip(' \t\n')
m = re.search('^start address (0x[0-9a-f]+)$', line)
if not m: continue
start_addr = m.group(1)
return start_addr
raise Exception('start address not found for %s' % exe_fp)
def exec_cmd(self, cmd, redirect_stderr=False, caller=None):
if self.verbose >= 2: print('(exec_cmd: "%s") Executing: "%s"' % (caller, cmd), file=sys.stderr)
if redirect_stderr:
with open('/dev/null', 'wb') as f:
retval = call(cmd, stderr=f, shell=True)
else:
retval = call(cmd, shell=True)
if self.verbose >= 2: print('(exec_cmd: "%s") Ret value: %s' % (caller, retval), file=sys.stderr)
return retval
def do_objdump_switch(self):
# do we need to invert the bytes from objdump?
return self.get_hardware().startswith('arm')
def switch_bytes(self, _hex):
# input: a hex string, like 34ab01ac
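    # e.g. '34ab01ac' -> ['34', 'ab', '01', 'ac'] -> reversed -> 'ac01ab34' (byte-order swap)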
# group them by 2 chars
_hex = [_hex[i:i+2] for i in range(0, len(_hex), 2)]
# reverse the list
_hex = list(reversed(_hex))
# build a string
_hex = ''.join(_hex)
return _hex
def need_to_align(self, hardware=None):
# use the passed settings, if specified
hardware = hardware if hardware is not None else self.hardware
for e_hardware in self.hw_with_align:
if not re.search(e_hardware, hardware): continue
    if self.debug: print('MATCH with %s (hardware: %s)' % (e_hardware, hardware), file=sys.stderr)
return True
return False
def align_hex(self, _hex):
assert len(_hex) % 2 == 0
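    # pad with NUL bytes up to the next 4-byte boundary, e.g. 6 bytes ('414243444546') -> 8 bytes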
    if (len(_hex) // 2) % 4 != 0:
        _hex = _hex + '00' * (4 - ((len(_hex) // 2) % 4))
assert len(_hex) % 8 == 0
return _hex
@staticmethod
def get_kernel():
return os.popen('uname -s').read().strip()
@staticmethod
def get_hardware():
return os.popen('uname -m').read().strip()
@staticmethod
def do_install(force=False):
if os.getuid() != 0:
print('ERROR: I need root!', file=sys.stderr)
sys.exit(1)
install_dir = '/usr/local/bin'
shellnoob_fp = os.path.join(install_dir, 'snoob')
print('This will copy shellnoob into %s' % shellnoob_fp, file=sys.stderr)
if not force:
input('Press a key to proceed..')
shutil.copyfile(__file__, shellnoob_fp)
os.chmod(shellnoob_fp, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH)
print('SUCCESS. "snoob -h" should display shellnoob\'s help', file=sys.stderr)
@staticmethod
def do_uninstall(force=False):
if os.getuid() != 0:
print('ERROR: I need root!', file=sys.stderr)
sys.exit(1)
install_dir = '/usr/local/bin'
shellnoob_fp = os.path.join(install_dir, 'snoob')
print('This will delete shellnoob from %s' % shellnoob_fp, file=sys.stderr)
if not force:
input('Press a key to proceed..')
os.unlink(shellnoob_fp)
class ShellNoobException(Exception):
pass
def main():
args = sys.argv[1:]
########################################################################
# WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING #
# This is the most awful CLI argument parsing ever. I started to do #
# it manually as I wanted a uber flexible CLI interface. I'm pretty #
# sure it's possible to achieve the same with optparse/argparse, but #
# I'm too noob for that. Somebody should write a OptParseNoob tool :-) #
########################################################################
# check for help
if '-h' in args or '--help' in args or len(args) == 0:
print_usage()
sys.exit(0)
# check for install/uninstall
if '--install' in args:
force = True if '--force' in args else False
ShellNoob.do_install(force)
sys.exit(0)
elif '--uninstall' in args:
force = True if '--force' in args else False
ShellNoob.do_uninstall(force)
sys.exit(0)
# parse the switches
flag_64_bit = False
if '--64' in args:
flag_64_bit = True
args.remove('--64')
flag_intel = False
if '--intel' in args:
flag_intel = True
args.remove('--intel')
with_breakpoint = False
if '-c' in args:
with_breakpoint = True
args.remove('-c')
keep_files = False
if '-k' in args:
keep_files = True
args.remove('-k')
verbose = 0
if '-vvvv' in args:
verbose = 4
args.remove('-vvvv')
if '-vvv' in args:
verbose = 3
args.remove('-vvv')
elif '-vv' in args:
verbose = 2
args.remove('-vv')
elif '-v' in args:
verbose = 1
args.remove('-v')
if '-q' in args:
verbose = -1
args.remove('-q')
snoob = ShellNoob(flag_64_bit=flag_64_bit, flag_intel=flag_intel,
with_breakpoint=with_breakpoint, verbose=verbose,
keep_files=keep_files
)
# parse the plugin switches
if '-i' in args:
snoob.do_interactive_mode(args)
sys.exit(0)
if '--get-const' in args:
idx = args.index('--get-const')
const = args[idx+1]
snoob.do_resolve_const(const)
sys.exit(0)
if '--get-sysnum' in args:
idx = args.index('--get-sysnum')
syscall = args[idx+1]
snoob.do_resolve_syscall(syscall)
sys.exit(0)
if '--get-errno' in args:
idx = args.index('--get-errno')
errno = args[idx+1]
snoob.do_resolve_errno(errno)
sys.exit(0)
do_strace_flag = False
if '--to-strace' in args:
do_strace_flag = True
args.remove('--to-strace')
do_gdb_flag = False
if '--to-gdb' in args:
do_gdb_flag = True
args.remove('--to-gdb')
if '--fork-nopper' in args:
idx = args.index('--fork-nopper')
exe_fp = args[idx+1]
snoob.do_fork_nopper(exe_fp)
sys.exit(0)
if '--file-patch' in args:
idx = args.index('--file-patch')
exe_fp = args[idx+1]
file_offset = int(args[idx+2], 16)
data = binascii.unhexlify(args[idx+3])
snoob.do_exe_patch(exe_fp, data, file_offset=file_offset)
sys.exit(0)
if '--vm-patch' in args:
idx = args.index('--vm-patch')
exe_fp = args[idx+1]
vm_address = int(args[idx+2], 16)
data = binascii.unhexlify(args[idx+3])
snoob.do_exe_patch(exe_fp, data, vm_address=vm_address)
sys.exit(0)
# parse the conversion arguments
curr_arg_idx = 0
# determine the input format
input_fmt = None
curr_arg = args[curr_arg_idx]
if curr_arg.startswith('--from-'):
input_fmt = curr_arg[len('--from-'):].lower()
if input_fmt not in snoob.INPUT_FMT:
raise Exception('input format "%s" not supported' % input_fmt)
curr_arg_idx += 1
input_fp = args[curr_arg_idx]
if input_fmt != 'shellstorm' and input_fp != '-':
input_fp = os.path.abspath(input_fp)
if not os.path.isfile(input_fp):
raise Exception('file %s does NOT exist' % input_fp)
curr_arg_idx += 1
if not input_fmt:
# ok, let's guess the input fmt
dot_idx = input_fp.rfind('.')
if dot_idx == -1:
raise Exception('Failed to guess the input format :/')
ext = input_fp[dot_idx+1:]
if ext in ['asm', 's', 'S', 'shell']:
input_fmt = 'asm'
elif ext in ['o', 'obj']:
input_fmt = 'obj'
elif ext in ['bin']:
input_fmt = 'bin'
elif ext in ['hex']:
input_fmt = 'hex'
elif ext in ['c', 'C']:
input_fmt = 'c'
    else:
        raise Exception('Unable to guess the input format from extension "%s"' % ext)
if do_strace_flag:
if snoob.verbose >= 1: print('do_strace mode selected', file=sys.stderr)
snoob.do_strace(input_fp, input_fmt)
sys.exit(0)
if do_gdb_flag:
if snoob.verbose >= 1: print('do_gdb mode selected', file=sys.stderr)
snoob.do_gdb(input_fp, input_fmt)
sys.exit(0)
output_fmt = None
curr_arg = args[curr_arg_idx]
if curr_arg.startswith('--to-'):
output_fmt = curr_arg[len('--to-'):].lower()
if output_fmt not in snoob.OUTPUT_FMT:
raise Exception('output format "%s" not supported' % output_fmt)
curr_arg_idx += 1
output_fp = None
if curr_arg_idx < len(args):
output_fp = args[curr_arg_idx]
curr_arg_idx += 1
if not output_fmt:
# ok, let's guess the output fmt
dot_idx = output_fp.rfind('.')
if dot_idx == -1:
raise Exception('Failed to guess the output format :/')
ext = output_fp[dot_idx+1:]
if ext in ['asm', 's', 'S', 'shell']:
output_fmt = 'asm'
elif ext in ['o', 'obj']:
output_fmt = 'obj'
elif ext in ['exe']:
output_fmt = 'exe'
elif ext in ['bin']:
output_fmt = 'bin'
elif ext in ['hex']:
output_fmt = 'hex'
elif ext in ['c', 'C']:
output_fmt = 'c'
elif ext in ['py', 'python']:
output_fmt = 'python'
elif ext in ['sh', 'bash']:
output_fmt = 'bash'
elif ext in ['rb', 'ruby']:
output_fmt = 'ruby'
elif ext in ['pretty']:
output_fmt = 'pretty'
elif ext in ['safeasm']:
output_fmt = 'safeasm'
    else:
        raise Exception('Unable to guess the output format from extension "%s"' % ext)
if output_fp == '-':
pass
    elif output_fp is not None:
output_fp = os.path.abspath(output_fp)
elif input_fp and input_fp != '-':
# choose the output_fp starting from the input_fp and input_fmt
input_pfx = input_fp[:input_fp.rfind('.')]
if output_fmt == 'completec':
output_ext = 'c'
else:
output_ext = output_fmt
output_fp = '%s.%s' % (input_pfx, output_ext)
else:
raise Exception('Specify the input/output filename!')
snoob.do_conversion(input_fp, output_fp, input_fmt, output_fmt)
if __name__== '__main__':
main()
| 37.284984
| 139
| 0.579562
|
904a1151f890c56ace6be32c1a918ff2955ea234
| 15,001
|
py
|
Python
|
pgsmo/objects/table_objects/column.py
|
DaeunYim/pgtoolsservice
|
b7e548718d797883027b2caee2d4722810b33c0f
|
[
"MIT"
] | 33
|
2019-05-27T13:04:35.000Z
|
2022-03-17T13:33:05.000Z
|
pgsmo/objects/table_objects/column.py
|
DaeunYim/pgtoolsservice
|
b7e548718d797883027b2caee2d4722810b33c0f
|
[
"MIT"
] | 31
|
2019-06-10T01:55:47.000Z
|
2022-03-09T07:27:49.000Z
|
pgsmo/objects/table_objects/column.py
|
DaeunYim/pgtoolsservice
|
b7e548718d797883027b2caee2d4722810b33c0f
|
[
"MIT"
] | 25
|
2019-05-13T18:39:24.000Z
|
2021-11-16T03:07:33.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import re
from typing import Optional, List, Dict
from smo.common.node_object import NodeObject, NodeCollection, NodeLazyPropertyCollection
from smo.common.scripting_mixins import ScriptableCreate, ScriptableDelete, ScriptableUpdate
from pgsmo.objects.server import server as s # noqa
import smo.utils.templating as templating
class Column(NodeObject, ScriptableCreate, ScriptableDelete, ScriptableUpdate):
TEMPLATE_ROOT = templating.get_template_root(__file__, 'column')
MACRO_ROOT = templating.get_template_root(__file__, '../table/macros')
@classmethod
def _from_node_query(cls, server: 's.Server', parent: NodeObject, **kwargs) -> 'Column':
"""
        Creates a new Column object based on the results from the column nodes query
:param server: Server that owns the column
:param parent: Parent object of the column. Should be a Table/View
:param kwargs: Optional parameters for the column
Kwargs:
name str: Name of the column
datatype str: Name of the type of the column
oid int: Object ID of the column
not_null bool: Whether or not null is allowed for the column
has_default_value bool: Whether or not the column has a default value constraint
            isprimarykey bool: Whether or not the column is a primary key
            is_updatable bool: Whether or not the column is updatable or read-only
            isunique bool: Whether or not the column only accepts unique values
            default: default value for the column
:return: Instance of the Column
"""
col = cls(server, parent, kwargs['name'], kwargs['datatype'])
col._oid = kwargs['oid']
col._has_default_value = kwargs['has_default_val']
col._not_null = kwargs['not_null']
col._column_ordinal = kwargs['oid'] - 1
col._is_key = kwargs['isprimarykey']
col._is_readonly = kwargs['is_updatable'] is False
col._is_unique = kwargs['isunique']
col._type_oid = kwargs['typoid']
col._default_value = kwargs['default'] if col._has_default_value is True else None
col._is_auto_increment = col._default_value is not None and col._default_value.startswith('nextval(')
return col
def __init__(self, server: 's.Server', parent: NodeObject, name: str, datatype: str):
"""
Initializes a new instance of a Column
:param server: Connection to the server/database that this object will belong to
:param parent: Parent object of the column, should be a Table/View
:param name: Name of the column
:param datatype: Type of the column
"""
self._server = server
self._parent: Optional['NodeObject'] = parent
self._name: str = name
self._oid: Optional[int] = None
self._is_system: bool = False
self._child_collections: Dict[str, NodeCollection] = {}
self._property_collections: List[NodeLazyPropertyCollection] = []
# Use _column_property_generator instead of _property_generator
self._full_properties: NodeLazyPropertyCollection = self._register_property_collection(self._column_property_generator)
ScriptableCreate.__init__(self, self._template_root(server), self._macro_root(), server.version)
ScriptableDelete.__init__(self, self._template_root(server), self._macro_root(), server.version)
ScriptableUpdate.__init__(self, self._template_root(server), self._macro_root(), server.version)
self._datatype: str = datatype
self._has_default_value: Optional[bool] = None
self._not_null: Optional[bool] = None
        self._column_ordinal: Optional[int] = None
        self._is_key: Optional[bool] = None
        self._is_readonly: Optional[bool] = None
        self._is_unique: Optional[bool] = None
        self._type_oid: Optional[int] = None
self._default_value: Optional[str] = None
self._is_auto_increment = None
def _column_property_generator(self):
template_root = self._template_root(self._server)
# Setup the parameters for the query
template_vars = self.template_vars
# Render and execute the template
sql = templating.render_template(
templating.get_template_path(template_root, 'properties.sql', self._server.version),
self._macro_root(),
**template_vars
)
cols, rows = self._server.connection.execute_dict(sql)
for row in rows:
if row['name'] == self._name:
return row
# PROPERTIES ###########################################################
@property
def datatype(self) -> str:
return self._datatype
@property
def has_default_value(self) -> Optional[bool]:
return self._has_default_value
@property
def not_null(self) -> Optional[bool]:
return self._not_null
@property
def column_ordinal(self) -> int:
return self._column_ordinal
@property
def is_key(self) -> bool:
return self._is_key
@property
def is_readonly(self) -> bool:
return self._is_readonly
@property
def is_unique(self) -> bool:
return self._is_unique
@property
def type_oid(self) -> int:
return self._type_oid
@property
def default_value(self) -> Optional[str]:
return self._default_value
@property
def is_auto_increment(self) -> bool:
return self._is_auto_increment
@property
def cltype(self):
return self._full_properties["cltype"]
@property
def schema(self):
return self._full_properties["schema"]
@property
def table(self):
return self._full_properties["table"]
@property
def displaytypname(self):
return self._full_properties["displaytypname"]
@property
def attlen(self):
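        # derive the declared length from the rendered full type, e.g. 'character varying(255)' -> '255'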
length, precision = self.get_length_precision(self.elemoid)
if length:
matchObj = re.search(r'(\d+)', self.fulltype)
if matchObj:
return matchObj.group(1)
return None
@property
def elemoid(self):
return self._full_properties["elemoid"]
@property
def attprecision(self):
length, precision = self.get_length_precision(self.elemoid)
if precision:
matchObj = re.search(r'(\d+),(\d+)', self.fulltype)
if matchObj:
return matchObj.group(2)
return precision
@property
def hasSqrBracket(self):
        return '[]' in self.cltype
@property
def fulltype(self):
fulltype = self.get_full_type(
self._full_properties['typnspname'], self._full_properties['typname'],
self._full_properties['isdup'], self._full_properties['attndims'], self._full_properties['atttypmod'])
return fulltype
@property
def collspcname(self):
return self._full_properties["collspcname"]
@property
def attnotnull(self):
return self._full_properties["attnotnull"]
@property
def defval(self):
return self._full_properties["defval"]
@property
def description(self):
return self._full_properties["description"]
@property
def attoptions(self):
return self._full_properties["attoptions"]
@property
def attacl(self):
return self._full_properties["attacl"]
@property
def seclabels(self):
return self._full_properties["seclabels"]
@property
def attstattarget(self):
return self._full_properties["attstattarget"]
@property
def attstorage(self):
return self._full_properties["attstorage"]
@property
def attidentity(self):
return self._full_properties["attidentity"]
@property
def colconstype(self):
return self._full_properties["colconstype"]
@property
def seqcache(self):
return self._full_properties["seqcache"]
@property
def seqcycle(self):
return self._full_properties["seqcycle"]
@property
def seqincrement(self):
return self._full_properties["seqincrement"]
@property
def seqmax(self):
return self._full_properties["seqmax"]
@property
def seqmin(self):
return self._full_properties["seqmin"]
@property
def seqrelid(self):
return self._full_properties["seqrelid"]
@property
def is_sql(self):
return True
# IMPLEMENTATION DETAILS ###############################################
@classmethod
def _macro_root(cls) -> List[str]:
return [cls.MACRO_ROOT]
@classmethod
def _template_root(cls, server: 's.Server') -> str:
return cls.TEMPLATE_ROOT
@property
def extended_vars(self):
return {
'tid': self.parent.oid
}
def _create_query_data(self) -> dict:
""" Provides data input for create script """
return {
"data": {
"name": self.name,
"cltype": self.cltype,
"schema": self.schema,
"table": self.table,
"displaytypname": self.displaytypname,
"attlen": self.attlen,
"attprecision": self.attprecision,
"hasSqrBracket": self.hasSqrBracket,
"collspcname": self.collspcname,
"attnotnull": self.attnotnull,
"defval": self.defval,
"description": self.description,
"attoptions": self.attoptions,
"attacl": self.attacl,
"seclabels": self.seclabels
},
"is_sql": self.is_sql
}
def _delete_query_data(self) -> dict:
""" Provides data input for delete script """
return {
"data": {
"schema": self.schema,
"table": self.table,
"name": self.name
}
}
def _update_query_data(self) -> dict:
""" Function that returns data for update script """
return {
"data": {
"name": self.name,
"schema": self.schema,
"table": self.table,
"cltype": self.cltype,
"attlen": self.attlen,
"attprecision": self.attprecision,
"collspcname": self.collspcname,
"defval": self.defval,
"attnotnull": self.attnotnull,
"attstattarget": self.attstattarget,
"attstorage": self.attstorage,
"description": self.description,
"attoptions": self.attoptions,
"attacl": self.attacl,
"seclabels": self.seclabels
},
"o_data": {
"name": "",
"cltype": "",
"attlen": "",
"attprecision": "",
"collspcname": "",
"defval": "",
"attnotnull": "",
"attstattarget": "",
"attstorage": ""
}
}
def get_length_precision(self, elemoid):
precision = False
length = False
typeval = ''
# Check against PGOID for specific type
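        # typeval codes used below: 'L' -> types with a length (bit/varbit/char/varchar and their arrays),
        # 'D' -> date/time types with a precision, 'P' -> numeric (precision + scale); the literals are pg_type OIDs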
if elemoid:
if elemoid in (1560, 1561, 1562, 1563, 1042, 1043, 1014, 1015):
typeval = 'L'
elif elemoid in (1083, 1114, 1115, 1183, 1184, 1185, 1186, 1187, 1266, 1270):
typeval = 'D'
elif elemoid in (1231, 1700):
typeval = 'P'
else:
typeval = ' '
# Set precision & length/min/max values
if typeval == 'P':
precision = True
if precision or typeval in ('L', 'D'):
length = True
return length, precision
def get_full_type(self, nsp, typname, isDup, numdims, typmod):
"""
Returns full type name with Length and Precision.
Args:
conn: Connection Object
condition: condition to restrict SQL statement
"""
schema = nsp if nsp is not None else ''
name = ''
array = ''
length = ''
# Above 7.4, format_type also sends the schema name if it's not included
# in the search_path, so we need to skip it in the typname
        if typname.find(schema + '".') >= 0:
            name = typname[len(schema) + 3:]
        elif typname.find(schema + '.') >= 0:
            name = typname[len(schema) + 1:]
else:
name = typname
if name.startswith('_'):
if not numdims:
numdims = 1
name = name[1:]
if name.endswith('[]'):
if not numdims:
numdims = 1
name = name[:-2]
if name.startswith('"') and name.endswith('"'):
name = name[1:-1]
if numdims > 0:
while numdims:
array += '[]'
numdims -= 1
if typmod != -1:
length = '('
if name == 'numeric':
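                # for numeric, atttypmod encodes ((precision << 16) | scale) + 4,
                # e.g. numeric(10,2) -> typmod 655366 -> precision 10, scale 2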
_len = (typmod - 4) >> 16
_prec = (typmod - 4) & 0xffff
length += str(_len)
if (_prec):
length += ',' + str(_prec)
            elif name in ('time', 'timetz',
                          'time without time zone', 'time with time zone',
                          'timestamp', 'timestamptz',
                          'timestamp without time zone', 'timestamp with time zone',
                          'bit', 'bit varying', 'varbit'):
_prec = 0
_len = typmod
length += str(_len)
elif name == 'interval':
_prec = 0
_len = typmod & 0xffff
length += str(_len)
elif name == 'date':
# Clear length
length = ''
else:
_len = typmod - 4
_prec = 0
length += str(_len)
if len(length) > 0:
length += ')'
if name == 'char' and schema == 'pg_catalog':
return '"char"' + array
elif name == 'time with time zone':
return 'time' + length + ' with time zone' + array
elif name == 'time without time zone':
return 'time' + length + ' without time zone' + array
elif name == 'timestamp with time zone':
return 'timestamp' + length + ' with time zone' + array
elif name == 'timestamp without time zone':
return 'timestamp' + length + ' without time zone' + array
else:
return name + length + array
| 32.122056
| 127
| 0.571095
|
898112bdd8a8c8df86bf80e0a4af1aac3c592ec8
| 1,652
|
py
|
Python
|
services/nodemanager/tests/test_computenode.py
|
coolmaksat/arvados
|
5f571760d4b52426e39ae39d0ce5cb9b7cfb0add
|
[
"Apache-2.0"
] | 1
|
2017-11-15T13:16:38.000Z
|
2017-11-15T13:16:38.000Z
|
services/nodemanager/tests/test_computenode.py
|
coolmaksat/arvados
|
5f571760d4b52426e39ae39d0ce5cb9b7cfb0add
|
[
"Apache-2.0"
] | null | null | null |
services/nodemanager/tests/test_computenode.py
|
coolmaksat/arvados
|
5f571760d4b52426e39ae39d0ce5cb9b7cfb0add
|
[
"Apache-2.0"
] | 1
|
2020-09-02T08:37:54.000Z
|
2020-09-02T08:37:54.000Z
|
#!/usr/bin/env python
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0
from __future__ import absolute_import, print_function
import time
import unittest
import arvados.errors as arverror
import mock
import arvnodeman.computenode as cnode
from . import testutil
@mock.patch('time.time', return_value=1)
class ShutdownTimerTestCase(unittest.TestCase):
def test_two_length_window(self, time_mock):
timer = cnode.ShutdownTimer(time_mock.return_value, [8, 2])
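        # with a [8, 2] window spec the shutdown window presumably opens 8 minutes (480 s) after the
        # start time, stays open for 2 minutes, and repeats every 10 minutes, hence next_opening() == 481
        # when the mocked clock reads 1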
self.assertEqual(481, timer.next_opening())
self.assertFalse(timer.window_open())
time_mock.return_value += 500
self.assertEqual(1081, timer.next_opening())
self.assertTrue(timer.window_open())
time_mock.return_value += 200
self.assertEqual(1081, timer.next_opening())
self.assertFalse(timer.window_open())
def test_three_length_window(self, time_mock):
timer = cnode.ShutdownTimer(time_mock.return_value, [6, 3, 1])
self.assertEqual(361, timer.next_opening())
self.assertFalse(timer.window_open())
time_mock.return_value += 400
self.assertEqual(961, timer.next_opening())
self.assertTrue(timer.window_open())
time_mock.return_value += 200
self.assertEqual(961, timer.next_opening())
self.assertFalse(timer.window_open())
class ArvadosTimestamp(unittest.TestCase):
def test_arvados_timestamp(self):
self.assertEqual(1527710178, cnode.arvados_timestamp('2018-05-30T19:56:18Z'))
self.assertEqual(1527710178.999371, cnode.arvados_timestamp('2018-05-30T19:56:18.999371Z'))
| 35.913043
| 99
| 0.719734
|
531dc76cf5dcad13788b752a5e7c1941eff99727
| 745
|
py
|
Python
|
services/core-api/app/api/now_applications/models/now_application_review_type.py
|
bcgov/mds
|
6c427a66a5edb4196222607291adef8fd6677038
|
[
"Apache-2.0"
] | 25
|
2018-07-09T19:04:37.000Z
|
2022-03-15T17:27:10.000Z
|
services/core-api/app/api/now_applications/models/now_application_review_type.py
|
areyeslo/mds
|
e8c38e593e09b78e2a57009c0d003d6c4bfa32e6
|
[
"Apache-2.0"
] | 983
|
2018-04-25T20:08:07.000Z
|
2022-03-31T21:45:20.000Z
|
services/core-api/app/api/now_applications/models/now_application_review_type.py
|
areyeslo/mds
|
e8c38e593e09b78e2a57009c0d003d6c4bfa32e6
|
[
"Apache-2.0"
] | 58
|
2018-05-15T22:35:50.000Z
|
2021-11-29T19:40:52.000Z
|
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.schema import FetchedValue
from sqlalchemy.ext.associationproxy import association_proxy
from app.api.utils.models_mixins import Base, AuditMixin
from app.extensions import db
class NOWApplicationReviewType(Base, AuditMixin):
__tablename__ = "now_application_review_type"
now_application_review_type_code = db.Column(db.String, primary_key=True)
description = db.Column(db.String, nullable=False)
active_ind = db.Column(db.Boolean, nullable=False, server_default=FetchedValue())
def __repr__(self):
        return '<NOWApplicationReviewType %r>' % self.now_application_review_type_code
@classmethod
def get_all(cls):
return cls.query.all()
| 35.47619
| 86
| 0.783893
|
2ea0511cf75fdfcf256cc5daf7427e13c44fe1cb
| 3,252
|
py
|
Python
|
watertap3/watertap3/utils/design.py
|
avdudchenko/WaterTAP3
|
839fbd4d19ec096ac7629b7109d18ab907b88b8e
|
[
"BSD-3-Clause"
] | null | null | null |
watertap3/watertap3/utils/design.py
|
avdudchenko/WaterTAP3
|
839fbd4d19ec096ac7629b7109d18ab907b88b8e
|
[
"BSD-3-Clause"
] | 34
|
2021-06-25T17:54:12.000Z
|
2021-06-25T17:54:27.000Z
|
watertap3/watertap3/utils/design.py
|
avdudchenko/WaterTAP3
|
839fbd4d19ec096ac7629b7109d18ab907b88b8e
|
[
"BSD-3-Clause"
] | 4
|
2021-06-25T18:32:31.000Z
|
2022-03-24T20:24:18.000Z
|
from . import module_import
from .constituent_removal_water_recovery import create
from .mixer_wt3 import Mixer
from .source_wt3 import Source
__all__ = ['add_unit_process',
'add_water_source',
'add_splitter',
'add_mixer']
def add_unit_process(m=None, unit_process_name=None, unit_process_type=None, unit_process_kind=None):
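    # resolve the unit-process module, attach an instance to the flowsheet under unit_process_name,
    # register constituent removal / water recovery via create(), and run the unit's costing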
up_module = module_import.get_module(unit_process_type)
unit_params = m.fs.pfd_dict[unit_process_name]['Parameter']
if unit_process_type == 'basic_unit':
setattr(m.fs, unit_process_name, up_module.UnitProcess(default={'property_package': m.fs.water}))
basic_unit_name = unit_params['unit_process_name']
m = create(m, basic_unit_name, unit_process_name)
else:
setattr(m.fs, unit_process_name, up_module.UnitProcess(default={'property_package': m.fs.water}))
m = create(m, unit_process_type, unit_process_name)
unit = getattr(m.fs, unit_process_name)
unit.unit_type = unit_process_type
unit.unit_name = unit_process_name
unit.unit_pretty_name = unit_process_name.replace('_', ' ').title().replace('Ro', 'RO').replace('Zld', 'ZLD').replace('Aop', 'AOP').replace('Uv', 'UV').replace('And', '&').replace('Sw', 'SW').replace('Gac', 'GAC').replace('Ph', 'pH').replace('Bc', 'BC').replace('Wwtp', 'WWTP')
unit.unit_kind = unit_process_kind
unit.unit_params = unit_params
unit.get_costing(unit_params=unit_params)
return m
def add_water_source(m=None, source_name=None, water_type=None, flow=None, link_to=None):
setattr(m.fs, source_name, Source(default={'property_package': m.fs.water}))
getattr(m.fs, source_name).set_source()
getattr(m.fs, source_name).flow_vol_in.fix(flow)
temp_source_df = m.fs.source_df[m.fs.source_df.water_type == water_type].copy()
train_constituent_list = list(getattr(m.fs, source_name).config.property_package.component_list)
for constituent_name in train_constituent_list:
if constituent_name in temp_source_df.index:
conc = temp_source_df.loc[constituent_name].value
getattr(m.fs, source_name).conc_mass_in[:, constituent_name].fix(conc)
else:
getattr(m.fs, source_name).conc_mass_in[:, constituent_name].fix(0)
getattr(m.fs, source_name).pressure_in.fix(1)
return m
def add_splitter(m=None, split_name=None, with_connection=False, outlet_list=None, outlet_fractions=None,
link_to=None, link_from=None, stream_name=None, unfix=False):
setattr(m.fs, split_name, Separator(default={
'property_package': m.fs.water,
'ideal_separation': False,
'outlet_list': outlet_list
}))
    if unfix:
        for key in outlet_fractions.keys():
            getattr(m.fs, split_name).split_fraction[0, key].unfix()
    else:
        for key in outlet_fractions.keys():
            getattr(m.fs, split_name).split_fraction[0, key].fix(outlet_fractions[key])
return m
def add_mixer(m=None, mixer_name=None, with_connection=False, inlet_list=None,
link_to=None, link_from=None, stream_name=None):
setattr(m.fs, mixer_name, Mixer(default={
'property_package': m.fs.water,
'inlet_list': inlet_list
}))
return m
| 40.65
| 281
| 0.690959
|
67504b8da6b2b1f79aae0e7ff2334dc78ca55f38
| 1,364
|
py
|
Python
|
electrum_mona/plugins/digitalbitbox/qt.py
|
zcore-dev/electrum-mona
|
2beb0c9c7794e8b03d1725bae41ee8b792c57275
|
[
"MIT"
] | null | null | null |
electrum_mona/plugins/digitalbitbox/qt.py
|
zcore-dev/electrum-mona
|
2beb0c9c7794e8b03d1725bae41ee8b792c57275
|
[
"MIT"
] | null | null | null |
electrum_mona/plugins/digitalbitbox/qt.py
|
zcore-dev/electrum-mona
|
2beb0c9c7794e8b03d1725bae41ee8b792c57275
|
[
"MIT"
] | null | null | null |
from functools import partial
from electrum_mona.i18n import _
from electrum_mona.plugin import hook
from electrum_mona.wallet import Standard_Wallet
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
from .digitalbitbox import DigitalBitboxPlugin
class Plugin(DigitalBitboxPlugin, QtPluginBase):
icon_unpaired = "digitalbitbox_unpaired.png"
icon_paired = "digitalbitbox.png"
def create_handler(self, window):
return DigitalBitbox_Handler(window)
@only_hook_if_libraries_available
@hook
def receive_menu(self, menu, addrs, wallet):
if type(wallet) is not Standard_Wallet:
return
keystore = wallet.get_keystore()
if type(keystore) is not self.keystore_class:
return
if not self.is_mobile_paired():
return
if not keystore.is_p2pkh():
return
if len(addrs) == 1:
def show_address():
keystore.thread.add(partial(self.show_address, wallet, addrs[0], keystore))
menu.addAction(_("Show on {}").format(self.device), show_address)
class DigitalBitbox_Handler(QtHandlerBase):
def __init__(self, win):
super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
| 29.652174
| 92
| 0.676686
|
321b8fa22d829152f7a8f9479279d503c3a868ac
| 13,868
|
py
|
Python
|
pybind/slxos/v16r_1_00b/mpls_state/memory/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/mpls_state/memory/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/mpls_state/memory/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import pools
import stats
class memory(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls-operational - based on the path /mpls-state/memory. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MPLS memory information
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__total_non_pool_memory','__pools','__stats',)
_yang_name = 'memory'
_rest_name = 'memory'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__pools = YANGDynClass(base=YANGListType("pool_index",pools.pools, yang_name="pools", rest_name="pools", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='pool-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}), is_container='list', yang_name="pools", rest_name="pools", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
self.__total_non_pool_memory = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="total-non-pool-memory", rest_name="total-non-pool-memory", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__stats = YANGDynClass(base=YANGListType("mem_stats_index",stats.stats, yang_name="stats", rest_name="stats", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mem-stats-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}), is_container='list', yang_name="stats", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-state', u'memory']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'mpls-state', u'memory']
def _get_total_non_pool_memory(self):
"""
Getter method for total_non_pool_memory, mapped from YANG variable /mpls_state/memory/total_non_pool_memory (uint32)
YANG Description: Total non pool memory
"""
return self.__total_non_pool_memory
def _set_total_non_pool_memory(self, v, load=False):
"""
Setter method for total_non_pool_memory, mapped from YANG variable /mpls_state/memory/total_non_pool_memory (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_total_non_pool_memory is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_total_non_pool_memory() directly.
YANG Description: Total non pool memory
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="total-non-pool-memory", rest_name="total-non-pool-memory", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """total_non_pool_memory must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="total-non-pool-memory", rest_name="total-non-pool-memory", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__total_non_pool_memory = t
if hasattr(self, '_set'):
self._set()
def _unset_total_non_pool_memory(self):
self.__total_non_pool_memory = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="total-non-pool-memory", rest_name="total-non-pool-memory", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_pools(self):
"""
Getter method for pools, mapped from YANG variable /mpls_state/memory/pools (list)
YANG Description: Memory pools
"""
return self.__pools
def _set_pools(self, v, load=False):
"""
Setter method for pools, mapped from YANG variable /mpls_state/memory/pools (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_pools is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_pools() directly.
YANG Description: Memory pools
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("pool_index",pools.pools, yang_name="pools", rest_name="pools", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='pool-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}), is_container='list', yang_name="pools", rest_name="pools", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """pools must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("pool_index",pools.pools, yang_name="pools", rest_name="pools", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='pool-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}), is_container='list', yang_name="pools", rest_name="pools", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)""",
})
self.__pools = t
if hasattr(self, '_set'):
self._set()
def _unset_pools(self):
self.__pools = YANGDynClass(base=YANGListType("pool_index",pools.pools, yang_name="pools", rest_name="pools", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='pool-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}), is_container='list', yang_name="pools", rest_name="pools", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-pools', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
def _get_stats(self):
"""
Getter method for stats, mapped from YANG variable /mpls_state/memory/stats (list)
YANG Description: 1
"""
return self.__stats
def _set_stats(self, v, load=False):
"""
Setter method for stats, mapped from YANG variable /mpls_state/memory/stats (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_stats is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_stats() directly.
YANG Description: 1
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("mem_stats_index",stats.stats, yang_name="stats", rest_name="stats", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mem-stats-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}), is_container='list', yang_name="stats", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """stats must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("mem_stats_index",stats.stats, yang_name="stats", rest_name="stats", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mem-stats-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}), is_container='list', yang_name="stats", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)""",
})
self.__stats = t
if hasattr(self, '_set'):
self._set()
def _unset_stats(self):
self.__stats = YANGDynClass(base=YANGListType("mem_stats_index",stats.stats, yang_name="stats", rest_name="stats", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mem-stats-index', extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}), is_container='list', yang_name="stats", rest_name="stats", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-mem-stats', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
total_non_pool_memory = __builtin__.property(_get_total_non_pool_memory)
pools = __builtin__.property(_get_pools)
stats = __builtin__.property(_get_stats)
_pyangbind_elements = {'total_non_pool_memory': total_non_pool_memory, 'pools': pools, 'stats': stats, }
| 66.354067
| 736
| 0.732478
|
79807aee70733c97d1208306d4d0c21fcb0ecf4f
| 3,499
|
py
|
Python
|
flocker/common/test/test_interface.py
|
wallnerryan/flocker-profiles
|
bcd3ced8edf4af86a68070ff6a714c45f9f4913b
|
[
"Apache-2.0"
] | null | null | null |
flocker/common/test/test_interface.py
|
wallnerryan/flocker-profiles
|
bcd3ced8edf4af86a68070ff6a714c45f9f4913b
|
[
"Apache-2.0"
] | null | null | null |
flocker/common/test/test_interface.py
|
wallnerryan/flocker-profiles
|
bcd3ced8edf4af86a68070ff6a714c45f9f4913b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Tests for ``flocker.common._interface``.
"""
from twisted.trial.unittest import SynchronousTestCase
from eliot.testing import (
assertHasMessage, capture_logging
)
from eliot import Field, MessageType
from zope.interface import Interface, implementer
from .. import interface_decorator
# Eliot structures for testing ``interface_decorator``.
METHOD = Field.for_types(
u"method", [unicode],
u"The name of the decorated method.")
TEST_MESSAGE = MessageType(u"flocker:common:test:interface:message",
[METHOD])
TEST_EXCEPTION = MessageType(u"flocker:common:test:interface:exception",
[METHOD])
class IDummy(Interface):
"""
Dummy interface with two test methods.
"""
def return_method():
"""
Return something.
"""
def raise_method():
"""
Raise something.
"""
@implementer(IDummy)
class Dummy(object):
"""
Dummy class that implements ``IDummy`` interface.
Implements two methods: one to return an object, and the second
to raise an ``Exception``.
"""
def __init__(self, result):
self._result = result
def return_method(self):
return self._result
def raise_method(self):
raise self._result
def _test_logged_method(method_name, original_name):
"""
Decorator for logging message to Eliot logger.
- Log before calling given ``method_name``.
- Log if ``method_name`` resulted in an ``Exception``.
"""
def _run_with_logging(self, *args, **kwargs):
original = getattr(self, original_name)
method = getattr(original, method_name)
try:
TEST_MESSAGE(method=method_name.decode("ascii")).write()
return method(*args, **kwargs)
except Exception:
TEST_EXCEPTION(method=method_name.decode("ascii")).write()
raise
return _run_with_logging
def test_decorator(interface, original):
"""
Consumer of ``interface_decorator``.
"""
return interface_decorator(
"test_decorator",
interface,
_test_logged_method,
original,
)
@test_decorator(IDummy, "_dummy")
class LoggingDummy(object):
"""
Decorated class corresponding to ``Dummy`` object.
"""
def __init__(self, dummy):
self._dummy = dummy
class InterfaceDecoratorTests(SynchronousTestCase):
"""
Tests for ``interface_decorator``.
"""
@capture_logging(
assertHasMessage,
TEST_MESSAGE, {
"method": u"return_method",
},
)
def test_return(self, logger):
"""
A decorated method returns the value returned by the original method,
and logs expected text to Eliot.
"""
result = object()
logging_dummy = LoggingDummy(Dummy(result))
self.assertIs(result, logging_dummy.return_method())
@capture_logging(
assertHasMessage,
TEST_EXCEPTION, {
"method": u"raise_method",
},
)
def test_raise(self, logger):
"""
A decorated method raises the same exception raised by the original
method, and logs expected text to Eliot.
"""
result = ValueError("Things.")
logging_dummy = LoggingDummy(Dummy(result))
exception = self.assertRaises(ValueError, logging_dummy.raise_method)
self.assertIs(result, exception)
| 25.918519
| 77
| 0.632181
|
338a961994b666a1bf2ecefb49fb2852baa728d3
| 875
|
py
|
Python
|
mysite/website/urls.py
|
aBadHorse/malachifrancis.me
|
c3114a6ff622e23f41fa040e32665a33efd58aa4
|
[
"MIT"
] | null | null | null |
mysite/website/urls.py
|
aBadHorse/malachifrancis.me
|
c3114a6ff622e23f41fa040e32665a33efd58aa4
|
[
"MIT"
] | null | null | null |
mysite/website/urls.py
|
aBadHorse/malachifrancis.me
|
c3114a6ff622e23f41fa040e32665a33efd58aa4
|
[
"MIT"
] | null | null | null |
from django.urls import path
from django.contrib import admin
from django.contrib.auth import views as auth_views
from .views import *
app_name = 'website'
urlpatterns = [
path('', HomeView.as_view()),
path('home/', HomeView.as_view(), name='home'),
path('about/', AboutView.as_view(), name='about'),
path('about/resume', ResumeView.as_view(), name='resume'),
path('art/', ArtView.as_view(), name='art'),
path('dev/', DevView.as_view(), name='dev'),
path('dev/deckalyzer', DeckalyzerView.as_view(), name='deckalyzer'),
path('music/', MusicView.as_view(), name='music'),
path('login/', LoginUserView.as_view(), name='login'),
    path('logout/', auth_views.LogoutView.as_view(next_page='/'), name='logout'),
path('register/', RegisterUserView.as_view(), name='register'),
path('account/', UpdateUserView.as_view(), name='account'),
]
| 41.666667
| 78
| 0.665143
|
3fcedb58ec18a62ba3b97cc1768e87cb668135bd
| 26,431
|
py
|
Python
|
Lib/test/test_asyncore.py
|
gerph/cpython
|
98813cb03c2371789669c3d8debf8fca2a344de9
|
[
"CNRI-Python-GPL-Compatible"
] | 11,058
|
2018-05-29T07:40:06.000Z
|
2022-03-31T11:38:42.000Z
|
Lib/test/test_asyncore.py
|
gerph/cpython
|
98813cb03c2371789669c3d8debf8fca2a344de9
|
[
"CNRI-Python-GPL-Compatible"
] | 2,105
|
2018-06-01T10:07:16.000Z
|
2022-03-31T14:56:42.000Z
|
Lib/test/test_asyncore.py
|
gerph/cpython
|
98813cb03c2371789669c3d8debf8fca2a344de9
|
[
"CNRI-Python-GPL-Compatible"
] | 914
|
2018-07-27T09:36:14.000Z
|
2022-03-31T19:56:34.000Z
|
import asyncore
import unittest
import select
import os
import socket
import sys
import time
import errno
import struct
import threading
from test import support
from io import BytesIO
if support.PGO:
raise unittest.SkipTest("test is not helpful for PGO")
TIMEOUT = 3
HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX')
class dummysocket:
def __init__(self):
self.closed = False
def close(self):
self.closed = True
def fileno(self):
return 42
class dummychannel:
def __init__(self):
self.socket = dummysocket()
def close(self):
self.socket.close()
class exitingdummy:
def __init__(self):
pass
def handle_read_event(self):
raise asyncore.ExitNow()
handle_write_event = handle_read_event
handle_close = handle_read_event
handle_expt_event = handle_read_event
class crashingdummy:
def __init__(self):
self.error_handled = False
def handle_read_event(self):
raise Exception()
handle_write_event = handle_read_event
handle_close = handle_read_event
handle_expt_event = handle_read_event
def handle_error(self):
self.error_handled = True
# used when testing senders; just collects what it gets until newline is sent
def capture_server(evt, buf, serv):
try:
serv.listen()
conn, addr = serv.accept()
except socket.timeout:
pass
else:
n = 200
start = time.monotonic()
while n > 0 and time.monotonic() - start < 3.0:
r, w, e = select.select([conn], [], [], 0.1)
if r:
n -= 1
data = conn.recv(10)
# keep everything except for the newline terminator
buf.write(data.replace(b'\n', b''))
if b'\n' in data:
break
time.sleep(0.01)
conn.close()
finally:
serv.close()
evt.set()
def bind_af_aware(sock, addr):
"""Helper function to bind a socket according to its family."""
if HAS_UNIX_SOCKETS and sock.family == socket.AF_UNIX:
# Make sure the path doesn't exist.
support.unlink(addr)
support.bind_unix_socket(sock, addr)
else:
sock.bind(addr)
class HelperFunctionTests(unittest.TestCase):
def test_readwriteexc(self):
# Check exception handling behavior of read, write and _exception
# check that ExitNow exceptions in the object handler method
# bubbles all the way up through asyncore read/write/_exception calls
tr1 = exitingdummy()
self.assertRaises(asyncore.ExitNow, asyncore.read, tr1)
self.assertRaises(asyncore.ExitNow, asyncore.write, tr1)
self.assertRaises(asyncore.ExitNow, asyncore._exception, tr1)
# check that an exception other than ExitNow in the object handler
# method causes the handle_error method to get called
tr2 = crashingdummy()
asyncore.read(tr2)
self.assertEqual(tr2.error_handled, True)
tr2 = crashingdummy()
asyncore.write(tr2)
self.assertEqual(tr2.error_handled, True)
tr2 = crashingdummy()
asyncore._exception(tr2)
self.assertEqual(tr2.error_handled, True)
# asyncore.readwrite uses constants in the select module that
# are not present in Windows systems (see this thread:
# http://mail.python.org/pipermail/python-list/2001-October/109973.html)
# These constants should be present as long as poll is available
@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
def test_readwrite(self):
# Check that correct methods are called by readwrite()
attributes = ('read', 'expt', 'write', 'closed', 'error_handled')
expected = (
(select.POLLIN, 'read'),
(select.POLLPRI, 'expt'),
(select.POLLOUT, 'write'),
(select.POLLERR, 'closed'),
(select.POLLHUP, 'closed'),
(select.POLLNVAL, 'closed'),
)
class testobj:
def __init__(self):
self.read = False
self.write = False
self.closed = False
self.expt = False
self.error_handled = False
def handle_read_event(self):
self.read = True
def handle_write_event(self):
self.write = True
def handle_close(self):
self.closed = True
def handle_expt_event(self):
self.expt = True
def handle_error(self):
self.error_handled = True
for flag, expectedattr in expected:
tobj = testobj()
self.assertEqual(getattr(tobj, expectedattr), False)
asyncore.readwrite(tobj, flag)
# Only the attribute modified by the routine we expect to be
# called should be True.
for attr in attributes:
self.assertEqual(getattr(tobj, attr), attr==expectedattr)
# check that ExitNow exceptions in the object handler method
# bubbles all the way up through asyncore readwrite call
tr1 = exitingdummy()
self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag)
# check that an exception other than ExitNow in the object handler
# method causes the handle_error method to get called
tr2 = crashingdummy()
self.assertEqual(tr2.error_handled, False)
asyncore.readwrite(tr2, flag)
self.assertEqual(tr2.error_handled, True)
def test_closeall(self):
self.closeall_check(False)
def test_closeall_default(self):
self.closeall_check(True)
def closeall_check(self, usedefault):
# Check that close_all() closes everything in a given map
l = []
testmap = {}
for i in range(10):
c = dummychannel()
l.append(c)
self.assertEqual(c.socket.closed, False)
testmap[i] = c
if usedefault:
socketmap = asyncore.socket_map
try:
asyncore.socket_map = testmap
asyncore.close_all()
finally:
testmap, asyncore.socket_map = asyncore.socket_map, socketmap
else:
asyncore.close_all(testmap)
self.assertEqual(len(testmap), 0)
for c in l:
self.assertEqual(c.socket.closed, True)
def test_compact_traceback(self):
try:
raise Exception("I don't like spam!")
except:
real_t, real_v, real_tb = sys.exc_info()
r = asyncore.compact_traceback()
else:
self.fail("Expected exception")
(f, function, line), t, v, info = r
self.assertEqual(os.path.split(f)[-1], 'test_asyncore.py')
self.assertEqual(function, 'test_compact_traceback')
self.assertEqual(t, real_t)
self.assertEqual(v, real_v)
self.assertEqual(info, '[%s|%s|%s]' % (f, function, line))
class DispatcherTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
asyncore.close_all()
def test_basic(self):
d = asyncore.dispatcher()
self.assertEqual(d.readable(), True)
self.assertEqual(d.writable(), True)
def test_repr(self):
d = asyncore.dispatcher()
self.assertEqual(repr(d), '<asyncore.dispatcher at %#x>' % id(d))
def test_log(self):
d = asyncore.dispatcher()
# capture output of dispatcher.log() (to stderr)
l1 = "Lovely spam! Wonderful spam!"
l2 = "I don't like spam!"
with support.captured_stderr() as stderr:
d.log(l1)
d.log(l2)
lines = stderr.getvalue().splitlines()
self.assertEqual(lines, ['log: %s' % l1, 'log: %s' % l2])
def test_log_info(self):
d = asyncore.dispatcher()
# capture output of dispatcher.log_info() (to stdout via print)
l1 = "Have you got anything without spam?"
l2 = "Why can't she have egg bacon spam and sausage?"
l3 = "THAT'S got spam in it!"
with support.captured_stdout() as stdout:
d.log_info(l1, 'EGGS')
d.log_info(l2)
d.log_info(l3, 'SPAM')
lines = stdout.getvalue().splitlines()
expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3]
self.assertEqual(lines, expected)
def test_unhandled(self):
d = asyncore.dispatcher()
d.ignore_log_types = ()
# capture output of dispatcher.log_info() (to stdout via print)
with support.captured_stdout() as stdout:
d.handle_expt()
d.handle_read()
d.handle_write()
d.handle_connect()
lines = stdout.getvalue().splitlines()
expected = ['warning: unhandled incoming priority event',
'warning: unhandled read event',
'warning: unhandled write event',
'warning: unhandled connect event']
self.assertEqual(lines, expected)
def test_strerror(self):
# refers to bug #8573
err = asyncore._strerror(errno.EPERM)
if hasattr(os, 'strerror'):
self.assertEqual(err, os.strerror(errno.EPERM))
err = asyncore._strerror(-1)
self.assertTrue(err != "")
class dispatcherwithsend_noread(asyncore.dispatcher_with_send):
def readable(self):
return False
def handle_connect(self):
pass
class DispatcherWithSendTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
asyncore.close_all()
@support.reap_threads
def test_send(self):
evt = threading.Event()
sock = socket.socket()
sock.settimeout(3)
port = support.bind_port(sock)
cap = BytesIO()
args = (evt, cap, sock)
t = threading.Thread(target=capture_server, args=args)
t.start()
try:
# wait a little longer for the server to initialize (it sometimes
# refuses connections on slow machines without this wait)
time.sleep(0.2)
data = b"Suppose there isn't a 16-ton weight?"
d = dispatcherwithsend_noread()
d.create_socket()
d.connect((support.HOST, port))
# give time for socket to connect
time.sleep(0.1)
d.send(data)
d.send(data)
d.send(b'\n')
n = 1000
while d.out_buffer and n > 0:
asyncore.poll()
n -= 1
evt.wait()
self.assertEqual(cap.getvalue(), data*2)
finally:
support.join_thread(t, timeout=TIMEOUT)
@unittest.skipUnless(hasattr(asyncore, 'file_wrapper'),
'asyncore.file_wrapper required')
class FileWrapperTest(unittest.TestCase):
def setUp(self):
self.d = b"It's not dead, it's sleeping!"
with open(support.TESTFN, 'wb') as file:
file.write(self.d)
def tearDown(self):
support.unlink(support.TESTFN)
def test_recv(self):
fd = os.open(support.TESTFN, os.O_RDONLY)
w = asyncore.file_wrapper(fd)
os.close(fd)
self.assertNotEqual(w.fd, fd)
self.assertNotEqual(w.fileno(), fd)
self.assertEqual(w.recv(13), b"It's not dead")
self.assertEqual(w.read(6), b", it's")
w.close()
self.assertRaises(OSError, w.read, 1)
def test_send(self):
d1 = b"Come again?"
d2 = b"I want to buy some cheese."
fd = os.open(support.TESTFN, os.O_WRONLY | os.O_APPEND)
w = asyncore.file_wrapper(fd)
os.close(fd)
w.write(d1)
w.send(d2)
w.close()
with open(support.TESTFN, 'rb') as file:
self.assertEqual(file.read(), self.d + d1 + d2)
@unittest.skipUnless(hasattr(asyncore, 'file_dispatcher'),
'asyncore.file_dispatcher required')
def test_dispatcher(self):
fd = os.open(support.TESTFN, os.O_RDONLY)
data = []
class FileDispatcher(asyncore.file_dispatcher):
def handle_read(self):
data.append(self.recv(29))
s = FileDispatcher(fd)
os.close(fd)
asyncore.loop(timeout=0.01, use_poll=True, count=2)
self.assertEqual(b"".join(data), self.d)
def test_resource_warning(self):
# Issue #11453
fd = os.open(support.TESTFN, os.O_RDONLY)
f = asyncore.file_wrapper(fd)
os.close(fd)
with support.check_warnings(('', ResourceWarning)):
f = None
support.gc_collect()
def test_close_twice(self):
fd = os.open(support.TESTFN, os.O_RDONLY)
f = asyncore.file_wrapper(fd)
os.close(fd)
os.close(f.fd) # file_wrapper dupped fd
with self.assertRaises(OSError):
f.close()
self.assertEqual(f.fd, -1)
# calling close twice should not fail
f.close()
class BaseTestHandler(asyncore.dispatcher):
def __init__(self, sock=None):
asyncore.dispatcher.__init__(self, sock)
self.flag = False
def handle_accept(self):
raise Exception("handle_accept not supposed to be called")
def handle_accepted(self):
raise Exception("handle_accepted not supposed to be called")
def handle_connect(self):
raise Exception("handle_connect not supposed to be called")
def handle_expt(self):
raise Exception("handle_expt not supposed to be called")
def handle_close(self):
raise Exception("handle_close not supposed to be called")
def handle_error(self):
raise
class BaseServer(asyncore.dispatcher):
"""A server which listens on an address and dispatches the
connection to a handler.
"""
def __init__(self, family, addr, handler=BaseTestHandler):
asyncore.dispatcher.__init__(self)
self.create_socket(family)
self.set_reuse_addr()
bind_af_aware(self.socket, addr)
self.listen(5)
self.handler = handler
@property
def address(self):
return self.socket.getsockname()
def handle_accepted(self, sock, addr):
self.handler(sock)
def handle_error(self):
raise
class BaseClient(BaseTestHandler):
def __init__(self, family, address):
BaseTestHandler.__init__(self)
self.create_socket(family)
self.connect(address)
def handle_connect(self):
pass
class BaseTestAPI:
def tearDown(self):
asyncore.close_all(ignore_all=True)
def loop_waiting_for_flag(self, instance, timeout=5):
timeout = float(timeout) / 100
count = 100
while asyncore.socket_map and count > 0:
asyncore.loop(timeout=0.01, count=1, use_poll=self.use_poll)
if instance.flag:
return
count -= 1
time.sleep(timeout)
self.fail("flag not set")
def test_handle_connect(self):
# make sure handle_connect is called on connect()
class TestClient(BaseClient):
def handle_connect(self):
self.flag = True
server = BaseServer(self.family, self.addr)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_accept(self):
# make sure handle_accept() is called when a client connects
class TestListener(BaseTestHandler):
def __init__(self, family, addr):
BaseTestHandler.__init__(self)
self.create_socket(family)
bind_af_aware(self.socket, addr)
self.listen(5)
self.address = self.socket.getsockname()
def handle_accept(self):
self.flag = True
server = TestListener(self.family, self.addr)
client = BaseClient(self.family, server.address)
self.loop_waiting_for_flag(server)
def test_handle_accepted(self):
# make sure handle_accepted() is called when a client connects
class TestListener(BaseTestHandler):
def __init__(self, family, addr):
BaseTestHandler.__init__(self)
self.create_socket(family)
bind_af_aware(self.socket, addr)
self.listen(5)
self.address = self.socket.getsockname()
def handle_accept(self):
asyncore.dispatcher.handle_accept(self)
def handle_accepted(self, sock, addr):
sock.close()
self.flag = True
server = TestListener(self.family, self.addr)
client = BaseClient(self.family, server.address)
self.loop_waiting_for_flag(server)
def test_handle_read(self):
# make sure handle_read is called on data received
class TestClient(BaseClient):
def handle_read(self):
self.flag = True
class TestHandler(BaseTestHandler):
def __init__(self, conn):
BaseTestHandler.__init__(self, conn)
self.send(b'x' * 1024)
server = BaseServer(self.family, self.addr, TestHandler)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_write(self):
# make sure handle_write is called
class TestClient(BaseClient):
def handle_write(self):
self.flag = True
server = BaseServer(self.family, self.addr)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_close(self):
# make sure handle_close is called when the other end closes
# the connection
class TestClient(BaseClient):
def handle_read(self):
# in order for handle_close to be called we need to make
# at least one recv() call
self.recv(1024)
def handle_close(self):
self.flag = True
self.close()
class TestHandler(BaseTestHandler):
def __init__(self, conn):
BaseTestHandler.__init__(self, conn)
self.close()
server = BaseServer(self.family, self.addr, TestHandler)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_close_after_conn_broken(self):
# Check that ECONNRESET/EPIPE is correctly handled (issues #5661 and
# #11265).
data = b'\0' * 128
class TestClient(BaseClient):
def handle_write(self):
self.send(data)
def handle_close(self):
self.flag = True
self.close()
def handle_expt(self):
self.flag = True
self.close()
class TestHandler(BaseTestHandler):
def handle_read(self):
self.recv(len(data))
self.close()
def writable(self):
return False
server = BaseServer(self.family, self.addr, TestHandler)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
@unittest.skipIf(sys.platform.startswith("sunos"),
"OOB support is broken on Solaris")
def test_handle_expt(self):
# Make sure handle_expt is called on OOB data received.
# Note: this might fail on some platforms as OOB data is
# tenuously supported and rarely used.
if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
self.skipTest("Not applicable to AF_UNIX sockets.")
if sys.platform == "darwin" and self.use_poll:
self.skipTest("poll may fail on macOS; see issue #28087")
class TestClient(BaseClient):
def handle_expt(self):
self.socket.recv(1024, socket.MSG_OOB)
self.flag = True
class TestHandler(BaseTestHandler):
def __init__(self, conn):
BaseTestHandler.__init__(self, conn)
self.socket.send(bytes(chr(244), 'latin-1'), socket.MSG_OOB)
server = BaseServer(self.family, self.addr, TestHandler)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_error(self):
class TestClient(BaseClient):
def handle_write(self):
1.0 / 0
def handle_error(self):
self.flag = True
try:
raise
except ZeroDivisionError:
pass
else:
raise Exception("exception not raised")
server = BaseServer(self.family, self.addr)
client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_connection_attributes(self):
server = BaseServer(self.family, self.addr)
client = BaseClient(self.family, server.address)
# we start disconnected
self.assertFalse(server.connected)
self.assertTrue(server.accepting)
# this can't be taken for granted across all platforms
#self.assertFalse(client.connected)
self.assertFalse(client.accepting)
# execute some loops so that client connects to server
asyncore.loop(timeout=0.01, use_poll=self.use_poll, count=100)
self.assertFalse(server.connected)
self.assertTrue(server.accepting)
self.assertTrue(client.connected)
self.assertFalse(client.accepting)
# disconnect the client
client.close()
self.assertFalse(server.connected)
self.assertTrue(server.accepting)
self.assertFalse(client.connected)
self.assertFalse(client.accepting)
# stop serving
server.close()
self.assertFalse(server.connected)
self.assertFalse(server.accepting)
def test_create_socket(self):
s = asyncore.dispatcher()
s.create_socket(self.family)
self.assertEqual(s.socket.type, socket.SOCK_STREAM)
self.assertEqual(s.socket.family, self.family)
self.assertEqual(s.socket.gettimeout(), 0)
self.assertFalse(s.socket.get_inheritable())
def test_bind(self):
if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
self.skipTest("Not applicable to AF_UNIX sockets.")
s1 = asyncore.dispatcher()
s1.create_socket(self.family)
s1.bind(self.addr)
s1.listen(5)
port = s1.socket.getsockname()[1]
s2 = asyncore.dispatcher()
s2.create_socket(self.family)
# EADDRINUSE indicates the socket was correctly bound
self.assertRaises(OSError, s2.bind, (self.addr[0], port))
def test_set_reuse_addr(self):
if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
self.skipTest("Not applicable to AF_UNIX sockets.")
with socket.socket(self.family) as sock:
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except OSError:
unittest.skip("SO_REUSEADDR not supported on this platform")
else:
# if SO_REUSEADDR succeeded for sock we expect asyncore
# to do the same
s = asyncore.dispatcher(socket.socket(self.family))
self.assertFalse(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
s.socket.close()
s.create_socket(self.family)
s.set_reuse_addr()
self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
@support.reap_threads
def test_quick_connect(self):
# see: http://bugs.python.org/issue10340
if self.family not in (socket.AF_INET, getattr(socket, "AF_INET6", object())):
self.skipTest("test specific to AF_INET and AF_INET6")
server = BaseServer(self.family, self.addr)
# run the thread 500 ms: the socket should be connected in 200 ms
t = threading.Thread(target=lambda: asyncore.loop(timeout=0.1,
count=5))
t.start()
try:
with socket.socket(self.family, socket.SOCK_STREAM) as s:
s.settimeout(.2)
s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
struct.pack('ii', 1, 0))
try:
s.connect(server.address)
except OSError:
pass
finally:
support.join_thread(t, timeout=TIMEOUT)
class TestAPI_UseIPv4Sockets(BaseTestAPI):
family = socket.AF_INET
addr = (support.HOST, 0)
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 support required')
class TestAPI_UseIPv6Sockets(BaseTestAPI):
family = socket.AF_INET6
addr = (support.HOSTv6, 0)
@unittest.skipUnless(HAS_UNIX_SOCKETS, 'Unix sockets required')
class TestAPI_UseUnixSockets(BaseTestAPI):
if HAS_UNIX_SOCKETS:
family = socket.AF_UNIX
addr = support.TESTFN
def tearDown(self):
support.unlink(self.addr)
BaseTestAPI.tearDown(self)
class TestAPI_UseIPv4Select(TestAPI_UseIPv4Sockets, unittest.TestCase):
use_poll = False
@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
class TestAPI_UseIPv4Poll(TestAPI_UseIPv4Sockets, unittest.TestCase):
use_poll = True
class TestAPI_UseIPv6Select(TestAPI_UseIPv6Sockets, unittest.TestCase):
use_poll = False
@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
class TestAPI_UseIPv6Poll(TestAPI_UseIPv6Sockets, unittest.TestCase):
use_poll = True
class TestAPI_UseUnixSocketsSelect(TestAPI_UseUnixSockets, unittest.TestCase):
use_poll = False
@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
class TestAPI_UseUnixSocketsPoll(TestAPI_UseUnixSockets, unittest.TestCase):
use_poll = True
if __name__ == "__main__":
unittest.main()
| 31.653892
| 86
| 0.603382
|
0351209b488725764ca66d24df63b7a666858c71
| 1,515
|
py
|
Python
|
55661085-detect-differences-between-images-opencv/detect_differences_between_images.py
|
nathancy/stackoverflow
|
e9e2e2b8fba61e41526638a13ac7ada6de2d7560
|
[
"MIT"
] | 3
|
2019-09-18T10:45:20.000Z
|
2021-09-18T08:36:49.000Z
|
55661085-detect-differences-between-images-opencv/detect_differences_between_images.py
|
nathancy/stackoverflow
|
e9e2e2b8fba61e41526638a13ac7ada6de2d7560
|
[
"MIT"
] | 1
|
2020-03-19T15:49:31.000Z
|
2020-03-30T14:54:03.000Z
|
55661085-detect-differences-between-images-opencv/detect_differences_between_images.py
|
nathancy/stackoverflow
|
e9e2e2b8fba61e41526638a13ac7ada6de2d7560
|
[
"MIT"
] | 1
|
2021-04-08T19:30:42.000Z
|
2021-04-08T19:30:42.000Z
|
from skimage.measure import compare_ssim
import cv2
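# Note (added): compare_ssim was removed from skimage.measure in newer scikit-image
# releases; the equivalent import there is
#   from skimage.metrics import structural_similarity as compare_ssim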
before = cv2.imread('base.png')
after = cv2.imread('base2.png')
# Convert images to grayscale
before_gray = cv2.cvtColor(before, cv2.COLOR_BGR2GRAY)
after_gray = cv2.cvtColor(after, cv2.COLOR_BGR2GRAY)
# Compute SSIM between two images
(score, diff) = compare_ssim(before_gray, after_gray, full=True)
# The diff image contains the actual image differences between the two images
# and is represented as a floating point data type in the range [0,1]
# so we must convert the array to 8-bit unsigned integers in the range
# [0,255] before we can use it with OpenCV
diff = (diff * 255).astype("uint8")
# Threshold the difference image, followed by finding contours to
# obtain the regions of the two input images that differ
thresh = cv2.threshold(diff, 0, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)[1]
contours = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
contours = contours[0] if len(contours) == 2 else contours[1]
contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
# The largest contour should be the new detected difference
if len(contour_sizes) > 0:
largest_contour = max(contour_sizes, key=lambda x: x[0])[1]
x,y,w,h = cv2.boundingRect(largest_contour)
cv2.rectangle(before, (x, y), (x + w, y + h), (36,255,12), 2)
cv2.rectangle(after, (x, y), (x + w, y + h), (36,255,12), 2)
cv2.imshow('before', before)
cv2.imshow('after', after)
cv2.imshow('diff',diff)
cv2.waitKey(0)
| 39.868421
| 86
| 0.735314
|
9cdc5160d8d8a8953fab994c184a637d70e2dce2
| 56
|
py
|
Python
|
helpers/__init__.py
|
philipperemy/wavenet
|
a10bb3cdf924df5b77a56c56e70c853c1e614327
|
[
"MIT"
] | 21
|
2016-11-18T03:06:10.000Z
|
2020-03-21T14:02:01.000Z
|
helpers/__init__.py
|
afcarl/wavenet-philipperemy
|
a10bb3cdf924df5b77a56c56e70c853c1e614327
|
[
"MIT"
] | null | null | null |
helpers/__init__.py
|
afcarl/wavenet-philipperemy
|
a10bb3cdf924df5b77a56c56e70c853c1e614327
|
[
"MIT"
] | 6
|
2016-11-24T02:44:37.000Z
|
2021-10-15T11:52:19.000Z
|
from .file_logger import FileLogger
from .utils import *
| 28
| 35
| 0.821429
|
170fbe028bbbf4cb6a94c4386c78e51a4a96d9bc
| 4,072
|
py
|
Python
|
python/tc2py.py
|
dhlee4/Tinkercell_new
|
c4d1848bbb905f0e1f9e011837268ac80aff8711
|
[
"BSD-3-Clause"
] | null | null | null |
python/tc2py.py
|
dhlee4/Tinkercell_new
|
c4d1848bbb905f0e1f9e011837268ac80aff8711
|
[
"BSD-3-Clause"
] | null | null | null |
python/tc2py.py
|
dhlee4/Tinkercell_new
|
c4d1848bbb905f0e1f9e011837268ac80aff8711
|
[
"BSD-3-Clause"
] | null | null | null |
from tinkercell import *
def toItems(array):
n = len(array);
A = tc_createItemsArray(n);
for i in range(0, n):
tc_setItem(A, i, array[i]);
return A;
def fromItems(array):
n = array.length;
A = range(0,n);
for i in range(0, n):
A[i] = tc_getItem(array,i);
#tc_deleteItemsArray(array);
return A;
def toStrings(array):
n = len(array);
A = tc_createStringsArray(n);
for i in range(0, n):
tc_setString(A, i, array[i]);
return A;
def fromStrings(array):
n = array.length;
A = range(0,n);
for i in range(0, n):
A[i] = tc_getString(array,i);
#tc_deleteStringsArray(array);
return A;
def fromMatrix(matrix, row_wise = False):
n = matrix.rows;
m = matrix.cols;
cols = fromStrings(matrix.colnames);
rows = fromStrings(matrix.rownames);
if row_wise:
A = range(0,n);
for i in range(0, n):
A[i] = range(0,m);
for j in range(0,m):
A[i][j] = tc_getMatrixValue(matrix,i,j);
else:
A = range(0,m);
for i in range(0, m):
A[i] = range(0,n);
for j in range(0,n):
A[i][j] = tc_getMatrixValue(matrix,j,i);
#tc_deleteMatrix(matrix);
return [rows, cols, A];
def toMatrix(lists, row_wise = False , rows = [], cols = []):
n = len(lists);
m = len(lists[0]);
A = tc_createMatrix(0,0);
if row_wise:
A = tc_createMatrix(n,m);
else:
A = tc_createMatrix(m,n);
for i in range(0, n):
for j in range(0,m):
if row_wise:
tc_setMatrixValue(A,i,j,lists[i][j]);
else:
tc_setMatrixValue(A,j,i,lists[i][j]);
n = len(rows);
m = len(cols);
for i in range(0,n):
tc_setRowName(A,i,rows[i]);
for i in range(0,m):
tc_setColumnName(A,i,cols[i]);
return A;
def fromTable(table, row_wise = False):
n = table.rows;
m = table.cols;
cols = fromStrings(table.colnames);
rows = fromStrings(table.rownames);
if row_wise:
A = range(0,n);
for i in range(0, n):
A[i] = range(0,m);
for j in range(0,m):
A[i][j] = tc_getTableValue(table,i,j);
else:
A = range(0,m);
for i in range(0, m):
A[i] = range(0,n);
for j in range(0,n):
A[i][j] = tc_getTableValue(table,j,i);
return [rows, cols, A];
def toTable(lists, row_wise = False , rows = [], cols = []):
n = len(lists);
m = len(lists[0]);
A = tc_createTable(0,0);
if row_wise:
A = tc_createTable(n,m);
else:
A = tc_createTable(m,n);
for i in range(0, n):
for j in range(0,m):
if row_wise:
tc_setTableValue(A,i,j,lists[i][j]);
else:
tc_setTableValue(A,j,i,lists[i][j]);
n = len(rows);
m = len(cols);
for i in range(0,n):
tc_setString(A.rownames,i,rows[i]);
for i in range(0,m):
tc_setString(A.colnames,i,cols[i]);
return A;
def toHex(r,g,b):
hexchars = "0123456789ABCDEF0";
return "#" + hexchars[r / 16] + hexchars[r % 16] + hexchars[g / 16] + hexchars[g % 16] + hexchars[b / 16] + hexchars[b % 16];
def fromTC(x):
if type(x) == type(tc_createMatrix(0,0)): return fromMatrix(x)
if type(x) == type(tc_createStringsArray(0)): return fromStrings(x)
if type(x) == type(tc_createItemsArray(0)): return fromItems(x)
if type(x) == type(tc_createTable(0,0)): return fromTable(x)
return x
def toTC(x, rows = [], cols = []):
if type(x) == type([]) and len(x) > 0 and type(x[0]) == type([]):
if (type(x[0][0]) == type(1.0)):
return toMatrix(x,False, rows,cols)
elif (type(x[0][0]) == type('hello')):
return toTable(x,False, rows,cols)
if type(x) == type([]) and len(x) > 0 and type(x[0]) == type('hello'): return toStrings(x)
if type(x) == type([]) and len(x) > 0 and type(x[0]) == type(1): return toItems(x)
return x
| 26.966887
| 129
| 0.528978
|
d092b42dd0a672786ee84b594d06e7501750ceb9
| 25,053
|
py
|
Python
|
python/thunder/rdds/fileio/readers.py
|
Andrewosh/thunder
|
500892f80f758313e3788d50d2c281da64561cbf
|
[
"Apache-2.0"
] | null | null | null |
python/thunder/rdds/fileio/readers.py
|
Andrewosh/thunder
|
500892f80f758313e3788d50d2c281da64561cbf
|
[
"Apache-2.0"
] | null | null | null |
python/thunder/rdds/fileio/readers.py
|
Andrewosh/thunder
|
500892f80f758313e3788d50d2c281da64561cbf
|
[
"Apache-2.0"
] | null | null | null |
"""Classes that abstract reading from various types of filesystems.
Currently two types of 'filesystem' are supported:
* the local file system, via python's native file() objects
* Amazon's S3, using the boto library (only if boto is installed; boto is not a requirement)
For each filesystem, two types of reader classes are provided:
* parallel readers are intended to serve as the entry point to a Spark workflow. They provide a read() method
that itself calls the spark context's parallelize() method, setting up a workflow with one partition per file. This
method returns a Spark RDD of <string filename, string binary data>.
* file readers are intended to abstract across the supported filesystems, providing a consistent interface to several
common file and filesystem operations. These include listing files in a directory, reading the contents of a file,
and providing a file handle or handle-like object that itself supports read(), seek(), and tell() operations.
The reader classes also all support a common syntax for path specifications, including both "standard" file paths
and "URI-like" syntax with an explicitly specified scheme (for instance, "file://" or "s3n://"). This path specification
syntax allows a single wildcard "*" character in the filename, making possible paths like
"s3n:///my-bucket/key-one/foo*.bar", referring to "every object in the S3 bucket my-bucket whose key starts with
'key-one/foo' and ends with '.bar'".
"""
import errno
import fnmatch
import glob
import itertools
import os
import urllib
import urlparse
from thunder.utils.common import AWSCredentials
_haveBoto = False
try:
import boto
_haveBoto = True
except ImportError:
boto = None
class FileNotFoundError(IOError):
"""An exception to be thrown when reader implementations can't find a requested file.
Implementations are responsible for watching for their own appropriate exceptions and rethrowing
FileNotFoundError.
See PEP 3151 for background and inspiration.
"""
pass
def appendExtensionToPathSpec(dataPath, ext=None):
"""Helper function for consistent handling of paths given with separately passed file extensions
Returns
-------
result: string dataPath
dataPath string formed by concatenating passed `dataPath` with "*" and passed `ext`, with some
normalization as appropriate
"""
if ext:
if '*' in dataPath:
# we already have a literal wildcard, which we take as a sign that the user knows
# what they're doing and don't want us overriding their path by appending extensions to it
return dataPath
elif os.path.splitext(dataPath)[1]:
# looks like we already have a literal extension specified at the end of dataPath.
# go with that.
return dataPath
else:
# no wildcard in path yet
# check whether we already end in `ext`, which suggests we've been passed a literal filename.
# prepend '.' to ext, as mild protection against the case where we have a directory 'bin' and
# are looking in it for files named '*.bin'.
if not ext.startswith('.'):
ext = '.'+ext
if not dataPath.endswith(ext):
# we have an extension and we'd like to append it.
# we assume that dataPath should be pointing to a directory at this point, but we might
# or might not have a directory separator at the end of it. add it if we don't.
if not dataPath.endswith(os.path.sep):
dataPath += os.path.sep
# return a path with "/*."+`ext` added to it.
return dataPath+'*'+ext
else:
# we are asking to append `ext`, but it looks like dataPath already ends with '.'+`ext`
return dataPath
else:
return dataPath
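# Examples of the behaviour above (added; assumes POSIX-style paths):
#   appendExtensionToPathSpec("/data/images", "png")          -> "/data/images/*.png"
#   appendExtensionToPathSpec("/data/images/*.png")           -> "/data/images/*.png" (wildcard kept as-is)
#   appendExtensionToPathSpec("/data/images/img.png", "png")  -> "/data/images/img.png" (literal filename kept)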
def selectByStartAndStopIndices(files, startIdx, stopIdx):
"""Helper function for consistent handling of start and stop indices
"""
if startIdx or stopIdx:
if startIdx is None:
startIdx = 0
if stopIdx is None:
stopIdx = len(files)
files = files[startIdx:stopIdx]
return files
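# Example (added): selectByStartAndStopIndices(['a', 'b', 'c', 'd'], 1, 3) returns
# ['b', 'c']; passing None for both indices returns the list unchanged.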
def _localRead(filePath, startOffset=None, size=-1):
"""Wrapper around open(filepath, 'rb') that returns the contents of the file as a string.
Will rethrow FileNotFoundError if it receives an IOError with error number indicating that the file isn't found.
"""
try:
with open(filePath, 'rb') as f:
if startOffset:
f.seek(startOffset)
buf = f.read(size)
except IOError, e:
if e.errno == errno.ENOENT:
raise FileNotFoundError(e)
else:
raise
return buf
class LocalFSParallelReader(object):
"""Parallel reader backed by python's native file() objects.
"""
def __init__(self, sparkContext, **kwargs):
# kwargs allow AWS credentials to be passed into generic Readers w/o exceptions being raised
# in this case kwargs are just ignored
self.sc = sparkContext
self.lastNRecs = None
@staticmethod
def uriToPath(uri):
# thanks stack overflow:
# http://stackoverflow.com/questions/5977576/is-there-a-convenient-way-to-map-a-file-uri-to-os-path
path = urllib.url2pathname(urlparse.urlparse(uri).path)
if uri and (not path):
# passed a nonempty uri, got an empty path back
# this happens when given a file uri that starts with "file://" instead of "file:///"
# error here to prevent unexpected behavior of looking at current working directory
raise ValueError("Could not interpret %s as URI. " +
"Note absolute paths in URIs should start with 'file:///', not 'file://'")
return path
@staticmethod
def _listFilesRecursive(absPath, ext=None):
filenames = set()
for root, dirs, files in os.walk(absPath):
if ext:
files = fnmatch.filter(files, '*.' + ext)
for filename in files:
filenames.add(os.path.join(root, filename))
return sorted(filenames)
@staticmethod
def _listFilesNonRecursive(absPath, ext=None):
if os.path.isdir(absPath):
if ext:
files = glob.glob(os.path.join(absPath, '*.' + ext))
else:
files = [os.path.join(absPath, fname) for fname in os.listdir(absPath)]
else:
files = glob.glob(absPath)
# filter out directories
files = [fpath for fpath in files if not os.path.isdir(fpath)]
return sorted(files)
def listFiles(self, absPath, ext=None, startIdx=None, stopIdx=None, recursive=False):
"""Get sorted list of file paths matching passed `absPath` path and `ext` filename extension
"""
files = LocalFSParallelReader._listFilesNonRecursive(absPath, ext) if not recursive else \
LocalFSParallelReader._listFilesRecursive(absPath, ext)
if len(files) < 1:
raise FileNotFoundError('cannot find files of type "%s" in %s' % (ext if ext else '*', absPath))
files = selectByStartAndStopIndices(files, startIdx, stopIdx)
return files
def read(self, dataPath, ext=None, startIdx=None, stopIdx=None, recursive=False, npartitions=None):
"""Sets up Spark RDD across files specified by dataPath on local filesystem.
Returns RDD of <integer file index, string buffer> k/v pairs.
"""
absPath = self.uriToPath(dataPath)
filePaths = self.listFiles(absPath, ext=ext, startIdx=startIdx, stopIdx=stopIdx, recursive=recursive)
lfilepaths = len(filePaths)
self.lastNRecs = lfilepaths
npartitions = min(npartitions, lfilepaths) if npartitions else lfilepaths
return self.sc.parallelize(enumerate(filePaths), npartitions).map(lambda (k, v): (k, _localRead(v)))
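# Minimal usage sketch (added; assumes an existing SparkContext `sc` and a
# hypothetical local directory of .bin files):
#   reader = LocalFSParallelReader(sc)
#   rdd = reader.read("/data/series/", ext="bin")
#   rdd.first()   # -> (0, <bytes of the first matching file>)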
class _BotoS3Client(object):
"""Superclass for boto-based S3 readers.
"""
@staticmethod
def parseS3Query(query, delim='/'):
keyName = ''
prefix = ''
postfix = ''
parseResult = urlparse.urlparse(query)
bucketName = parseResult.netloc
keyQuery = parseResult.path.lstrip(delim)
if not parseResult.scheme.lower() in ('', "s3", "s3n"):
raise ValueError("Query scheme must be one of '', 's3', or 's3n'; got: '%s'" % parseResult.scheme)
# special case handling for strings of form "/bucket/dir":
if (not bucketName.strip()) and keyQuery:
toks = keyQuery.split(delim, 1)
bucketName = toks[0]
if len(toks) == 2:
keyQuery = toks[1]
else:
keyQuery = ''
if not bucketName.strip():
raise ValueError("Could not parse bucket name from query string '%s'" % query)
keyToks = keyQuery.split("*")
nkeyToks = len(keyToks)
if nkeyToks == 0:
pass
elif nkeyToks == 1:
keyName = keyToks[0]
elif nkeyToks == 2:
rdelimIdx = keyToks[0].rfind(delim)
if rdelimIdx >= 0:
keyName = keyToks[0][:(rdelimIdx+1)]
prefix = keyToks[0][(rdelimIdx+1):] if len(keyToks[0]) > (rdelimIdx+1) else ''
else:
prefix = keyToks[0]
postfix = keyToks[1]
else:
raise ValueError("Only one wildcard ('*') allowed in query string, got: '%s'" % query)
return bucketName, keyName, prefix, postfix
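# Example (added, tracing the parsing logic above):
#   parseS3Query("s3n:///my-bucket/key-one/foo*.bar")
#   -> ("my-bucket", "key-one/", "foo", ".bar")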
@staticmethod
def checkPrefix(bucket, keyPath, delim='/'):
return len(bucket.get_all_keys(prefix=keyPath, delimiter=delim, max_keys=1)) > 0
@staticmethod
def filterPredicate(key, post, inclusive=False):
kname = key.name
keyEndsWithPostfix = kname.endswith(post)
return keyEndsWithPostfix if inclusive else not keyEndsWithPostfix
@staticmethod
def retrieveKeys(bucket, key, prefix='', postfix='', delim='/', includeDirectories=False,
recursive=False):
if key and prefix:
assert key.endswith(delim)
keyPath = key+prefix
# if we are asking for a key that doesn't end in a delimiter, check whether it might
# actually be a directory
if not keyPath.endswith(delim) and keyPath:
# not all directories have actual keys associated with them
# check for matching prefix instead of literal key:
if _BotoS3Client.checkPrefix(bucket, keyPath+delim, delim=delim):
# found a directory; change path so that it explicitly refers to directory
keyPath += delim
listDelim = delim if not recursive else None
results = bucket.list(prefix=keyPath, delimiter=listDelim)
if postfix:
return itertools.ifilter(lambda k_: _BotoS3Client.filterPredicate(k_, postfix, inclusive=True), results)
elif not includeDirectories:
return itertools.ifilter(lambda k_: _BotoS3Client.filterPredicate(k_, delim, inclusive=False), results)
else:
return results
def __init__(self, awsCredentialsOverride=None):
"""Initialization; validates that AWS keys are available as environment variables.
Will let boto library look up credentials itself according to its own rules - e.g. first looking for
AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY, then going through several possible config files and finally
looking for a ~/.aws/credentials .ini-formatted file. See boto docs:
http://boto.readthedocs.org/en/latest/boto_config_tut.html
However, if an AWSCredentials object is provided, its `awsAccessKeyId` and `awsSecretAccessKey` attributes
will be used instead of those found by the standard boto credential lookup process.
"""
if not _haveBoto:
raise ValueError("The boto package does not appear to be available; boto is required for BotoS3Reader")
self.awsCredentialsOverride = awsCredentialsOverride if awsCredentialsOverride else AWSCredentials()
class BotoS3ParallelReader(_BotoS3Client):
"""Parallel reader backed by boto AWS client library.
"""
def __init__(self, sparkContext, awsCredentialsOverride=None):
super(BotoS3ParallelReader, self).__init__(awsCredentialsOverride=awsCredentialsOverride)
self.sc = sparkContext
self.lastNRecs = None
def listFiles(self, dataPath, ext=None, startIdx=None, stopIdx=None, recursive=False):
bucketname, keyNames = self._listFilesImpl(dataPath, ext=ext, startIdx=startIdx, stopIdx=stopIdx,
recursive=recursive)
return ["s3n:///%s/%s" % (bucketname, keyname) for keyname in keyNames]
def _listFilesImpl(self, dataPath, ext=None, startIdx=None, stopIdx=None, recursive=False):
parse = _BotoS3Client.parseS3Query(dataPath)
conn = boto.connect_s3(**self.awsCredentialsOverride.credentialsAsDict)
bucket = conn.get_bucket(parse[0])
keys = _BotoS3Client.retrieveKeys(bucket, parse[1], prefix=parse[2], postfix=parse[3], recursive=recursive)
keyNameList = [key.name for key in keys]
if ext:
keyNameList = [keyname for keyname in keyNameList if keyname.endswith(ext)]
keyNameList.sort()
keyNameList = selectByStartAndStopIndices(keyNameList, startIdx, stopIdx)
return bucket.name, keyNameList
def read(self, dataPath, ext=None, startIdx=None, stopIdx=None, recursive=False, npartitions=None):
"""Sets up Spark RDD across S3 objects specified by dataPath.
Returns RDD of <string s3 keyname, string buffer> k/v pairs.
"""
dataPath = appendExtensionToPathSpec(dataPath, ext)
bucketName, keyNameList = self._listFilesImpl(dataPath, startIdx=startIdx, stopIdx=stopIdx, recursive=recursive)
if not keyNameList:
raise FileNotFoundError("No S3 objects found for '%s'" % dataPath)
# try to prevent self from getting pulled into the closure
awsAccessKeyIdOverride_, awsSecretAccessKeyOverride_ = self.awsCredentialsOverride.credentials
def readSplitFromS3(kvIter):
conn = boto.connect_s3(aws_access_key_id=awsAccessKeyIdOverride_,
aws_secret_access_key=awsSecretAccessKeyOverride_)
bucket = conn.get_bucket(bucketName)
for kv in kvIter:
idx, keyName = kv
key = bucket.get_key(keyName)
buf = key.get_contents_as_string()
yield idx, buf
self.lastNRecs = len(keyNameList)
npartitions = min(npartitions, self.lastNRecs) if npartitions else self.lastNRecs
return self.sc.parallelize(enumerate(keyNameList), npartitions).mapPartitions(readSplitFromS3)
class LocalFSFileReader(object):
"""File reader backed by python's native file() objects.
"""
def __init__(self, **kwargs):
# do nothing; allows AWS access keys to be passed in to a generic Reader instance w/o blowing up
pass
def __listRecursive(self, dataPath):
if os.path.isdir(dataPath):
dirname = dataPath
matchpattern = None
else:
dirname, matchpattern = os.path.split(dataPath)
filenames = set()
for root, dirs, files in os.walk(dirname):
if matchpattern:
files = fnmatch.filter(files, matchpattern)
for filename in files:
filenames.add(os.path.join(root, filename))
filenames = list(filenames)
filenames.sort()
return filenames
def list(self, dataPath, filename=None, startIdx=None, stopIdx=None, recursive=False,
includeDirectories=False):
"""List files specified by dataPath.
Datapath may include a single wildcard ('*') in the filename specifier.
Returns sorted list of absolute path strings.
"""
absPath = LocalFSParallelReader.uriToPath(dataPath)
if (not filename) and recursive:
return self.__listRecursive(absPath)
if filename:
if os.path.isdir(absPath):
absPath = os.path.join(absPath, filename)
else:
absPath = os.path.join(os.path.dirname(absPath), filename)
else:
if os.path.isdir(absPath) and not includeDirectories:
absPath = os.path.join(absPath, "*")
files = glob.glob(absPath)
# filter out directories
if not includeDirectories:
files = [fpath for fpath in files if not os.path.isdir(fpath)]
files.sort()
files = selectByStartAndStopIndices(files, startIdx, stopIdx)
return files
def read(self, dataPath, filename=None, startOffset=None, size=-1):
filenames = self.list(dataPath, filename=filename)
if not filenames:
raise FileNotFoundError("No file found matching: '%s'" % dataPath)
if len(filenames) > 1:
raise ValueError("Found multiple files matching: '%s'" % dataPath)
return _localRead(filenames[0], startOffset=startOffset, size=size)
def open(self, dataPath, filename=None):
filenames = self.list(dataPath, filename=filename)
if not filenames:
raise FileNotFoundError("No file found matching: '%s'" % dataPath)
if len(filenames) > 1:
raise ValueError("Found multiple files matching: '%s'" % dataPath)
return open(filenames[0], 'rb')
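# Minimal usage sketch (added; the path is hypothetical):
#   reader = LocalFSFileReader()
#   buf = reader.read("/data/series/series.bin", startOffset=0, size=1024)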
class BotoS3FileReader(_BotoS3Client):
"""File reader backed by the boto AWS client library.
"""
def __getMatchingKeys(self, dataPath, filename=None, includeDirectories=False, recursive=False):
parse = _BotoS3Client.parseS3Query(dataPath)
conn = boto.connect_s3(**self.awsCredentialsOverride.credentialsAsDict)
bucketName = parse[0]
keyName = parse[1]
bucket = conn.get_bucket(bucketName)
if filename:
# check whether last section of dataPath refers to a directory
if not keyName.endswith("/"):
if self.checkPrefix(bucket, keyName + "/"):
# keyname is a directory, but we've omitted the trailing "/"
keyName += "/"
else:
# assume keyname refers to an object other than a directory
# look for filename in same directory as keyname
slashIdx = keyName.rfind("/")
if slashIdx >= 0:
keyName = keyName[:(slashIdx+1)]
else:
# no directory separators, so our object is in the top level of the bucket
keyName = ""
keyName += filename
return _BotoS3Client.retrieveKeys(bucket, keyName, prefix=parse[2], postfix=parse[3],
includeDirectories=includeDirectories, recursive=recursive)
def list(self, dataPath, filename=None, startIdx=None, stopIdx=None, recursive=False, includeDirectories=False):
"""List s3 objects specified by dataPath.
Returns sorted list of 's3n://' URIs.
"""
keys = self.__getMatchingKeys(dataPath, filename=filename, includeDirectories=includeDirectories,
recursive=recursive)
keyNames = ["s3n:///" + key.bucket.name + "/" + key.name for key in keys]
keyNames.sort()
keyNames = selectByStartAndStopIndices(keyNames, startIdx, stopIdx)
return keyNames
def __getSingleMatchingKey(self, dataPath, filename=None):
keys = self.__getMatchingKeys(dataPath, filename=filename)
# keys is probably a lazy-loading ifilter iterable
try:
key = keys.next()
except StopIteration:
raise FileNotFoundError("Could not find S3 object for: '%s'" % dataPath)
# we expect to only have a single key returned
nextKey = None
try:
nextKey = keys.next()
except StopIteration:
pass
if nextKey:
raise ValueError("Found multiple S3 keys for: '%s'" % dataPath)
return key
def read(self, dataPath, filename=None, startOffset=None, size=-1):
key = self.__getSingleMatchingKey(dataPath, filename=filename)
if startOffset or (size > -1):
# specify Range header in S3 request
# see: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35
# and: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
if not startOffset:
startOffset = 0
if size > -1:
sizeStr = startOffset + size - 1 # range header is inclusive
else:
sizeStr = ""
hdrs = {"Range": "bytes=%d-%s" % (startOffset, sizeStr)}
return key.get_contents_as_string(headers=hdrs)
else:
return key.get_contents_as_string()
def open(self, dataPath, filename=None):
key = self.__getSingleMatchingKey(dataPath, filename=filename)
return BotoS3ReadFileHandle(key)
class BotoS3ReadFileHandle(object):
"""Read-only file handle-like object exposing a subset of file methods.
Returned by BotoS3FileReader's open() method.
"""
def __init__(self, key):
self._key = key
self._closed = False
self._offset = 0
def close(self):
try:
self._key.close(fast=True)
except TypeError:
# workaround for early versions of boto that don't have the 'fast' keyword
self._key.close()
self._closed = True
def read(self, size=-1):
if self._offset or (size > -1):
# return empty string to indicate EOF if we are offset past the end of the file
# else boto will throw an error at us
if self._offset >= self._key.size:
return ""
if size > -1:
sizeStr = str(self._offset + size - 1) # range header is inclusive
else:
sizeStr = ""
hdrs = {"Range": "bytes=%d-%s" % (self._offset, sizeStr)}
else:
hdrs = {}
buf = self._key.get_contents_as_string(headers=hdrs)
self._offset += len(buf)
return buf
def seek(self, offset, whence=0):
if whence == 0:
self._offset = offset
elif whence == 1:
self._offset += offset
elif whence == 2:
self._offset = self._key.size + offset
else:
raise IOError("Invalid 'whence' argument, must be 0, 1, or 2. See file().seek.")
def tell(self):
return self._offset
@property
def closed(self):
return self._closed
@property
def name(self):
return "s3n:///" + self._key.bucket.name + "/" + self._key.name
@property
def mode(self):
return "rb"
SCHEMAS_TO_PARALLELREADERS = {
'': LocalFSParallelReader,
'file': LocalFSParallelReader,
's3': BotoS3ParallelReader,
's3n': BotoS3ParallelReader,
'hdfs': None,
'http': None,
'https': None,
'ftp': None
}
SCHEMAS_TO_FILEREADERS = {
'': LocalFSFileReader,
'file': LocalFSFileReader,
's3': BotoS3FileReader,
's3n': BotoS3FileReader,
'hdfs': None,
'http': None,
'https': None,
'ftp': None
}
def getByScheme(dataPath, lookup, default):
"""Helper function used by get*ForPath().
"""
parseresult = urlparse.urlparse(dataPath)
clazz = lookup.get(parseresult.scheme, default)
if clazz is None:
raise NotImplementedError("No implementation for scheme " + parseresult.scheme)
return clazz
def getParallelReaderForPath(dataPath):
"""Returns the class of a parallel reader suitable for the scheme used by `dataPath`.
The resulting class object must still be instantiated in order to get a usable instance of the class.
Throws NotImplementedError if the requested scheme is explicitly not supported (e.g. "ftp://").
Returns LocalFSParallelReader if scheme is absent or not recognized.
"""
return getByScheme(dataPath, SCHEMAS_TO_PARALLELREADERS, LocalFSParallelReader)
def getFileReaderForPath(dataPath):
"""Returns the class of a file reader suitable for the scheme used by `dataPath`.
The resulting class object must still be instantiated in order to get a usable instance of the class.
Throws NotImplementedError if the requested scheme is explicitly not supported (e.g. "ftp://").
Returns LocalFSFileReader if scheme is absent or not recognized.
"""
return getByScheme(dataPath, SCHEMAS_TO_FILEREADERS, LocalFSFileReader)
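# Examples (added, based on the scheme tables above):
#   getParallelReaderForPath("s3n:///bucket/data/*.bin")  -> BotoS3ParallelReader
#   getFileReaderForPath("/local/path/data.bin")          -> LocalFSFileReader
#   getFileReaderForPath("ftp://host/data.bin")           -> raises NotImplementedError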
| 40.342995
| 120
| 0.636371
|
ee8649587e8c87e2722ed52066c8977d5da4fca8
| 185
|
py
|
Python
|
zee-news-crawler/config.py
|
project-anuvaad/anuvaad-corpus-tools
|
7624b48bc32eda6cf2efe126e28ad41a9763fe10
|
[
"MIT"
] | 6
|
2021-03-17T10:25:44.000Z
|
2022-03-10T11:28:51.000Z
|
timesofindia-crawler/config.py
|
project-anuvaad/anuvaad-corpus-tools
|
7624b48bc32eda6cf2efe126e28ad41a9763fe10
|
[
"MIT"
] | null | null | null |
timesofindia-crawler/config.py
|
project-anuvaad/anuvaad-corpus-tools
|
7624b48bc32eda6cf2efe126e28ad41a9763fe10
|
[
"MIT"
] | 7
|
2020-12-17T07:23:29.000Z
|
2021-12-01T14:35:28.000Z
|
CHROME_DRIVER_PATH = "chromedriver"
DRIVER_WAIT_TIME = 20
CSV_FILE_ENCODING = "utf-16"
ANUVAAD_USERNAME = "stageuser@tarento.com"
ANUVAAD_PASSWORD = "Welcome@123"
BREAK_PROGRESS = True
| 26.428571
| 42
| 0.805405
|
50448dcc47588b8765d3e06815f96b4f1d7c6795
| 27,025
|
py
|
Python
|
sdk/python/pulumi_aws/ec2/volume_attachment.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-10T16:33:40.000Z
|
2021-11-10T16:33:40.000Z
|
sdk/python/pulumi_aws/ec2/volume_attachment.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/ec2/volume_attachment.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['VolumeAttachmentArgs', 'VolumeAttachment']
@pulumi.input_type
class VolumeAttachmentArgs:
def __init__(__self__, *,
device_name: pulumi.Input[str],
instance_id: pulumi.Input[str],
volume_id: pulumi.Input[str],
force_detach: Optional[pulumi.Input[bool]] = None,
skip_destroy: Optional[pulumi.Input[bool]] = None,
stop_instance_before_detaching: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a VolumeAttachment resource.
:param pulumi.Input[str] device_name: The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
:param pulumi.Input[str] instance_id: ID of the Instance to attach to
:param pulumi.Input[str] volume_id: ID of the Volume to be attached
:param pulumi.Input[bool] force_detach: Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
:param pulumi.Input[bool] skip_destroy: Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
:param pulumi.Input[bool] stop_instance_before_detaching: Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
"""
pulumi.set(__self__, "device_name", device_name)
pulumi.set(__self__, "instance_id", instance_id)
pulumi.set(__self__, "volume_id", volume_id)
if force_detach is not None:
pulumi.set(__self__, "force_detach", force_detach)
if skip_destroy is not None:
pulumi.set(__self__, "skip_destroy", skip_destroy)
if stop_instance_before_detaching is not None:
pulumi.set(__self__, "stop_instance_before_detaching", stop_instance_before_detaching)
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> pulumi.Input[str]:
"""
The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
"""
return pulumi.get(self, "device_name")
@device_name.setter
def device_name(self, value: pulumi.Input[str]):
pulumi.set(self, "device_name", value)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> pulumi.Input[str]:
"""
ID of the Instance to attach to
"""
return pulumi.get(self, "instance_id")
@instance_id.setter
def instance_id(self, value: pulumi.Input[str]):
pulumi.set(self, "instance_id", value)
@property
@pulumi.getter(name="volumeId")
def volume_id(self) -> pulumi.Input[str]:
"""
ID of the Volume to be attached
"""
return pulumi.get(self, "volume_id")
@volume_id.setter
def volume_id(self, value: pulumi.Input[str]):
pulumi.set(self, "volume_id", value)
@property
@pulumi.getter(name="forceDetach")
def force_detach(self) -> Optional[pulumi.Input[bool]]:
"""
Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
"""
return pulumi.get(self, "force_detach")
@force_detach.setter
def force_detach(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_detach", value)
@property
@pulumi.getter(name="skipDestroy")
def skip_destroy(self) -> Optional[pulumi.Input[bool]]:
"""
Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
"""
return pulumi.get(self, "skip_destroy")
@skip_destroy.setter
def skip_destroy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "skip_destroy", value)
@property
@pulumi.getter(name="stopInstanceBeforeDetaching")
def stop_instance_before_detaching(self) -> Optional[pulumi.Input[bool]]:
"""
Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
"""
return pulumi.get(self, "stop_instance_before_detaching")
@stop_instance_before_detaching.setter
def stop_instance_before_detaching(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "stop_instance_before_detaching", value)
@pulumi.input_type
class _VolumeAttachmentState:
def __init__(__self__, *,
device_name: Optional[pulumi.Input[str]] = None,
force_detach: Optional[pulumi.Input[bool]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
skip_destroy: Optional[pulumi.Input[bool]] = None,
stop_instance_before_detaching: Optional[pulumi.Input[bool]] = None,
volume_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering VolumeAttachment resources.
:param pulumi.Input[str] device_name: The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
:param pulumi.Input[bool] force_detach: Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
:param pulumi.Input[str] instance_id: ID of the Instance to attach to
:param pulumi.Input[bool] skip_destroy: Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
:param pulumi.Input[bool] stop_instance_before_detaching: Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
:param pulumi.Input[str] volume_id: ID of the Volume to be attached
"""
if device_name is not None:
pulumi.set(__self__, "device_name", device_name)
if force_detach is not None:
pulumi.set(__self__, "force_detach", force_detach)
if instance_id is not None:
pulumi.set(__self__, "instance_id", instance_id)
if skip_destroy is not None:
pulumi.set(__self__, "skip_destroy", skip_destroy)
if stop_instance_before_detaching is not None:
pulumi.set(__self__, "stop_instance_before_detaching", stop_instance_before_detaching)
if volume_id is not None:
pulumi.set(__self__, "volume_id", volume_id)
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> Optional[pulumi.Input[str]]:
"""
The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
"""
return pulumi.get(self, "device_name")
@device_name.setter
def device_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "device_name", value)
@property
@pulumi.getter(name="forceDetach")
def force_detach(self) -> Optional[pulumi.Input[bool]]:
"""
Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
"""
return pulumi.get(self, "force_detach")
@force_detach.setter
def force_detach(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_detach", value)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the Instance to attach to
"""
return pulumi.get(self, "instance_id")
@instance_id.setter
def instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_id", value)
@property
@pulumi.getter(name="skipDestroy")
def skip_destroy(self) -> Optional[pulumi.Input[bool]]:
"""
Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
"""
return pulumi.get(self, "skip_destroy")
@skip_destroy.setter
def skip_destroy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "skip_destroy", value)
@property
@pulumi.getter(name="stopInstanceBeforeDetaching")
def stop_instance_before_detaching(self) -> Optional[pulumi.Input[bool]]:
"""
Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
"""
return pulumi.get(self, "stop_instance_before_detaching")
@stop_instance_before_detaching.setter
def stop_instance_before_detaching(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "stop_instance_before_detaching", value)
@property
@pulumi.getter(name="volumeId")
def volume_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the Volume to be attached
"""
return pulumi.get(self, "volume_id")
@volume_id.setter
def volume_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "volume_id", value)
class VolumeAttachment(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
device_name: Optional[pulumi.Input[str]] = None,
force_detach: Optional[pulumi.Input[bool]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
skip_destroy: Optional[pulumi.Input[bool]] = None,
stop_instance_before_detaching: Optional[pulumi.Input[bool]] = None,
volume_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides an AWS EBS Volume Attachment as a top level resource, to attach and
detach volumes from AWS Instances.
> **NOTE on EBS block devices:** If you use `ebs_block_device` on an `ec2.Instance`, this provider will assume management over the full set of non-root EBS block devices for the instance, and treats additional block devices as drift. For this reason, `ebs_block_device` cannot be mixed with external `ebs.Volume` + `aws_ebs_volume_attachment` resources for a given instance.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
web = aws.ec2.Instance("web",
ami="ami-21f78e11",
availability_zone="us-west-2a",
instance_type="t2.micro",
tags={
"Name": "HelloWorld",
})
example = aws.ebs.Volume("example",
availability_zone="us-west-2a",
size=1)
ebs_att = aws.ec2.VolumeAttachment("ebsAtt",
device_name="/dev/sdh",
volume_id=example.id,
instance_id=web.id)
```
## Import
EBS Volume Attachments can be imported using `DEVICE_NAME:VOLUME_ID:INSTANCE_ID`, e.g.,
```sh
$ pulumi import aws:ec2/volumeAttachment:VolumeAttachment example /dev/sdh:vol-049df61146c4d7901:i-12345678
```
[1]https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names [2]https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names [3]https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] device_name: The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
:param pulumi.Input[bool] force_detach: Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
:param pulumi.Input[str] instance_id: ID of the Instance to attach to
:param pulumi.Input[bool] skip_destroy: Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
:param pulumi.Input[bool] stop_instance_before_detaching: Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
:param pulumi.Input[str] volume_id: ID of the Volume to be attached
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: VolumeAttachmentArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides an AWS EBS Volume Attachment as a top level resource, to attach and
detach volumes from AWS Instances.
> **NOTE on EBS block devices:** If you use `ebs_block_device` on an `ec2.Instance`, this provider will assume management over the full set of non-root EBS block devices for the instance, and treats additional block devices as drift. For this reason, `ebs_block_device` cannot be mixed with external `ebs.Volume` + `aws_ebs_volume_attachment` resources for a given instance.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
web = aws.ec2.Instance("web",
ami="ami-21f78e11",
availability_zone="us-west-2a",
instance_type="t2.micro",
tags={
"Name": "HelloWorld",
})
example = aws.ebs.Volume("example",
availability_zone="us-west-2a",
size=1)
ebs_att = aws.ec2.VolumeAttachment("ebsAtt",
device_name="/dev/sdh",
volume_id=example.id,
instance_id=web.id)
```
## Import
EBS Volume Attachments can be imported using `DEVICE_NAME:VOLUME_ID:INSTANCE_ID`, e.g.,
```sh
$ pulumi import aws:ec2/volumeAttachment:VolumeAttachment example /dev/sdh:vol-049df61146c4d7901:i-12345678
```
[1]https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names [2]https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names [3]https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html
:param str resource_name: The name of the resource.
:param VolumeAttachmentArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(VolumeAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
device_name: Optional[pulumi.Input[str]] = None,
force_detach: Optional[pulumi.Input[bool]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
skip_destroy: Optional[pulumi.Input[bool]] = None,
stop_instance_before_detaching: Optional[pulumi.Input[bool]] = None,
volume_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = VolumeAttachmentArgs.__new__(VolumeAttachmentArgs)
if device_name is None and not opts.urn:
raise TypeError("Missing required property 'device_name'")
__props__.__dict__["device_name"] = device_name
__props__.__dict__["force_detach"] = force_detach
if instance_id is None and not opts.urn:
raise TypeError("Missing required property 'instance_id'")
__props__.__dict__["instance_id"] = instance_id
__props__.__dict__["skip_destroy"] = skip_destroy
__props__.__dict__["stop_instance_before_detaching"] = stop_instance_before_detaching
if volume_id is None and not opts.urn:
raise TypeError("Missing required property 'volume_id'")
__props__.__dict__["volume_id"] = volume_id
super(VolumeAttachment, __self__).__init__(
'aws:ec2/volumeAttachment:VolumeAttachment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
device_name: Optional[pulumi.Input[str]] = None,
force_detach: Optional[pulumi.Input[bool]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
skip_destroy: Optional[pulumi.Input[bool]] = None,
stop_instance_before_detaching: Optional[pulumi.Input[bool]] = None,
volume_id: Optional[pulumi.Input[str]] = None) -> 'VolumeAttachment':
"""
Get an existing VolumeAttachment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] device_name: The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
:param pulumi.Input[bool] force_detach: Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
:param pulumi.Input[str] instance_id: ID of the Instance to attach to
:param pulumi.Input[bool] skip_destroy: Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
:param pulumi.Input[bool] stop_instance_before_detaching: Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
:param pulumi.Input[str] volume_id: ID of the Volume to be attached
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _VolumeAttachmentState.__new__(_VolumeAttachmentState)
__props__.__dict__["device_name"] = device_name
__props__.__dict__["force_detach"] = force_detach
__props__.__dict__["instance_id"] = instance_id
__props__.__dict__["skip_destroy"] = skip_destroy
__props__.__dict__["stop_instance_before_detaching"] = stop_instance_before_detaching
__props__.__dict__["volume_id"] = volume_id
return VolumeAttachment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> pulumi.Output[str]:
"""
The device name to expose to the instance (for
example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names) and [Device Naming on Windows Instances](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names) for more information.
"""
return pulumi.get(self, "device_name")
@property
@pulumi.getter(name="forceDetach")
def force_detach(self) -> pulumi.Output[Optional[bool]]:
"""
Set to `true` if you want to force the
volume to detach. Useful if previous attempts failed, but use this option only
as a last resort, as this can result in **data loss**. See
[Detaching an Amazon EBS Volume from an Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html) for more information.
"""
return pulumi.get(self, "force_detach")
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> pulumi.Output[str]:
"""
ID of the Instance to attach to
"""
return pulumi.get(self, "instance_id")
@property
@pulumi.getter(name="skipDestroy")
def skip_destroy(self) -> pulumi.Output[Optional[bool]]:
"""
Set this to true if you do not wish
to detach the volume from the instance to which it is attached at destroy
time, and instead just remove the attachment from this provider state. This is
useful when destroying an instance which has volumes created by some other
means attached.
"""
return pulumi.get(self, "skip_destroy")
@property
@pulumi.getter(name="stopInstanceBeforeDetaching")
def stop_instance_before_detaching(self) -> pulumi.Output[Optional[bool]]:
"""
Set this to true to ensure that the target instance is stopped
before trying to detach the volume. Stops the instance, if it is not already stopped.
"""
return pulumi.get(self, "stop_instance_before_detaching")
@property
@pulumi.getter(name="volumeId")
def volume_id(self) -> pulumi.Output[str]:
"""
ID of the Volume to be attached
"""
return pulumi.get(self, "volume_id")
| 52.071291
| 382
| 0.667863
|
eb3f17a9569a47fb5726aed97bba34046fe49f8f
| 14,580
|
py
|
Python
|
ansible/lib/ansible/modules/core/cloud/amazon/ec2_eip.py
|
kiv-box/redis
|
966a0c3f0a51282cd173b42a6e249d23f4e89dec
|
[
"Apache-2.0"
] | null | null | null |
ansible/lib/ansible/modules/core/cloud/amazon/ec2_eip.py
|
kiv-box/redis
|
966a0c3f0a51282cd173b42a6e249d23f4e89dec
|
[
"Apache-2.0"
] | null | null | null |
ansible/lib/ansible/modules/core/cloud/amazon/ec2_eip.py
|
kiv-box/redis
|
966a0c3f0a51282cd173b42a6e249d23f4e89dec
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_eip
short_description: manages EC2 elastic IP (EIP) addresses.
description:
- This module can allocate or release an EIP.
- This module can associate/disassociate an EIP with instances or network interfaces.
version_added: "1.4"
options:
device_id:
description:
- The id of the device for the EIP. Can be an EC2 Instance id or Elastic Network Interface (ENI) id.
required: false
aliases: [ instance_id ]
version_added: "2.0"
public_ip:
description:
- The IP address of a previously allocated EIP.
- If present and device is specified, the EIP is associated with the device.
- If absent and device is specified, the EIP is disassociated from the device.
required: false
aliases: [ ip ]
state:
description:
- If present, allocate an EIP or associate an existing EIP with a device.
- If absent, disassociate the EIP from the device and optionally release it.
required: false
choices: ['present', 'absent']
default: present
in_vpc:
description:
- allocate an EIP inside a VPC or not
required: false
default: false
version_added: "1.4"
reuse_existing_ip_allowed:
description:
- Reuse an EIP that is not associated to a device (when available), instead of allocating a new one.
required: false
default: false
version_added: "1.6"
release_on_disassociation:
description:
- whether or not to automatically release the EIP when it is disassociated
required: false
default: false
version_added: "2.0"
extends_documentation_fragment:
- aws
- ec2
author: "Rick Mendes (@rickmendes) <rmendes@illumina.com>"
notes:
- This module will return C(public_ip) on success, which will contain the
public IP address associated with the device.
- There may be a delay between the time the EIP is assigned and when
the cloud instance is reachable via the new address. Use wait_for and
pause to delay further playbook execution until the instance is reachable,
if necessary.
- This module returns multiple changed statuses on disassociation or release.
    It returns an overall status based on any changes occurring. It also returns
individual changed statuses for disassociation and release.
'''
EXAMPLES = '''
- name: associate an elastic IP with an instance
ec2_eip: device_id=i-1212f003 ip=93.184.216.119
- name: associate an elastic IP with a device
ec2_eip: device_id=eni-c8ad70f3 ip=93.184.216.119
- name: disassociate an elastic IP from an instance
ec2_eip: device_id=i-1212f003 ip=93.184.216.119 state=absent
- name: disassociate an elastic IP from a device
ec2_eip: device_id=eni-c8ad70f3 ip=93.184.216.119 state=absent
- name: allocate a new elastic IP and associate it with an instance
ec2_eip: device_id=i-1212f003
- name: allocate a new elastic IP without associating it to anything
action: ec2_eip
register: eip
- name: output the IP
debug: msg="Allocated IP is {{ eip.public_ip }}"
- name: another way of allocating an elastic IP without associating it to anything
ec2_eip: state='present'
- name: provision new instances with ec2
  ec2: keypair=mykey instance_type=c1.medium image=ami-40603AD1 wait=yes group=webserver count=3
register: ec2
- name: associate new elastic IPs with each of the instances
ec2_eip: "device_id={{ item }}"
with_items: ec2.instance_ids
- name: allocate a new elastic IP inside a VPC in us-west-2
ec2_eip: region=us-west-2 in_vpc=yes
register: eip
- name: output the IP
debug: msg="Allocated IP inside a VPC is {{ eip.public_ip }}"
'''
try:
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
class EIPException(Exception):
pass
def associate_ip_and_device(ec2, address, device_id, check_mode, isinstance=True):
if address_is_associated_with_device(ec2, address, device_id, isinstance):
return {'changed': False}
# If we're in check mode, nothing else to do
if not check_mode:
if isinstance:
if address.domain == "vpc":
res = ec2.associate_address(device_id, allocation_id=address.allocation_id)
else:
res = ec2.associate_address(device_id, public_ip=address.public_ip)
else:
res = ec2.associate_address(network_interface_id=device_id, allocation_id=address.allocation_id)
if not res:
raise EIPException('association failed')
return {'changed': True}
def disassociate_ip_and_device(ec2, address, device_id, check_mode, isinstance=True):
if not address_is_associated_with_device(ec2, address, device_id, isinstance):
return {'changed': False}
# If we're in check mode, nothing else to do
if not check_mode:
if address.domain == 'vpc':
res = ec2.disassociate_address(
association_id=address.association_id)
else:
res = ec2.disassociate_address(public_ip=address.public_ip)
if not res:
raise EIPException('disassociation failed')
return {'changed': True}
def _find_address_by_ip(ec2, public_ip):
try:
return ec2.get_all_addresses([public_ip])[0]
except boto.exception.EC2ResponseError as e:
if "Address '{}' not found.".format(public_ip) not in e.message:
raise
def _find_address_by_device_id(ec2, device_id, isinstance=True):
if isinstance:
addresses = ec2.get_all_addresses(None, {'instance-id': device_id})
else:
addresses = ec2.get_all_addresses(None, {'network-interface-id': device_id})
if addresses:
return addresses[0]
def find_address(ec2, public_ip, device_id, isinstance=True):
""" Find an existing Elastic IP address """
if public_ip:
return _find_address_by_ip(ec2, public_ip)
elif device_id and isinstance:
return _find_address_by_device_id(ec2, device_id)
elif device_id:
return _find_address_by_device_id(ec2, device_id, isinstance=False)
def address_is_associated_with_device(ec2, address, device_id, isinstance=True):
""" Check if the elastic IP is currently associated with the device """
address = ec2.get_all_addresses(address.public_ip)
if address:
if isinstance:
return address and address[0].instance_id == device_id
else:
return address and address[0].network_interface_id == device_id
return False
def allocate_address(ec2, domain, reuse_existing_ip_allowed):
""" Allocate a new elastic IP address (when needed) and return it """
if reuse_existing_ip_allowed:
domain_filter = {'domain': domain or 'standard'}
all_addresses = ec2.get_all_addresses(filters=domain_filter)
if domain == 'vpc':
unassociated_addresses = [a for a in all_addresses
if not a.association_id]
else:
unassociated_addresses = [a for a in all_addresses
if not a.instance_id]
if unassociated_addresses:
return unassociated_addresses[0]
return ec2.allocate_address(domain=domain)
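# Hedged illustration (not executed by the module): with reuse enabled, an
# already-allocated but currently unassociated address is returned instead of
# allocating a new one. `ec2` here stands for a boto EC2 connection and the
# arguments are assumptions for the sake of the example.
#
#   addr = allocate_address(ec2, domain='vpc', reuse_existing_ip_allowed=True)
#   addr.public_ip    # an existing, unassociated EIP if one is available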
def release_address(ec2, address, check_mode):
""" Release a previously allocated elastic IP address """
# If we're in check mode, nothing else to do
if not check_mode:
if not address.release():
            raise EIPException('release failed')
return {'changed': True}
def find_device(ec2, module, device_id, isinstance=True):
""" Attempt to find the EC2 instance and return it """
if isinstance:
try:
reservations = ec2.get_all_reservations(instance_ids=[device_id])
except boto.exception.EC2ResponseError as e:
module.fail_json(msg=str(e))
if len(reservations) == 1:
instances = reservations[0].instances
if len(instances) == 1:
return instances[0]
else:
try:
interfaces = ec2.get_all_network_interfaces(network_interface_ids=[device_id])
except boto.exception.EC2ResponseError as e:
module.fail_json(msg=str(e))
if len(interfaces) == 1:
return interfaces[0]
raise EIPException("could not find instance" + device_id)
def ensure_present(ec2, module, domain, address, device_id,
reuse_existing_ip_allowed, check_mode, isinstance=True):
changed = False
    # Allocate a new EIP if we have not been given an existing address
if not address:
if check_mode:
return {'changed': True}
address = allocate_address(ec2, domain, reuse_existing_ip_allowed)
changed = True
if device_id:
        # device_id refers to an EC2 instance: look it up and associate the address
if isinstance:
instance = find_device(ec2, module, device_id)
if reuse_existing_ip_allowed:
if len(instance.vpc_id) > 0 and domain is None:
raise EIPException("You must set 'in_vpc' to true to associate an instance with an existing ip in a vpc")
# Associate address object (provided or allocated) with instance
assoc_result = associate_ip_and_device(ec2, address, device_id,
check_mode)
else:
instance = find_device(ec2, module, device_id, isinstance=False)
# Associate address object (provided or allocated) with instance
assoc_result = associate_ip_and_device(ec2, address, device_id,
check_mode, isinstance=False)
if instance.vpc_id:
domain = 'vpc'
changed = changed or assoc_result['changed']
return {'changed': changed, 'public_ip': address.public_ip, 'allocation_id': address.allocation_id}
def ensure_absent(ec2, domain, address, device_id, check_mode, isinstance=True):
if not address:
return {'changed': False}
# disassociating address from instance
if device_id:
if isinstance:
return disassociate_ip_and_device(ec2, address, device_id,
check_mode)
else:
return disassociate_ip_and_device(ec2, address, device_id,
check_mode, isinstance=False)
# releasing address
else:
return release_address(ec2, address, check_mode)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
device_id=dict(required=False, aliases=['instance_id']),
public_ip=dict(required=False, aliases=['ip']),
state=dict(required=False, default='present',
choices=['present', 'absent']),
in_vpc=dict(required=False, type='bool', default=False),
reuse_existing_ip_allowed=dict(required=False, type='bool',
default=False),
release_on_disassociation=dict(required=False, type='bool', default=False),
wait_timeout=dict(default=300),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
ec2 = ec2_connect(module)
device_id = module.params.get('device_id')
instance_id = module.params.get('instance_id')
public_ip = module.params.get('public_ip')
state = module.params.get('state')
in_vpc = module.params.get('in_vpc')
domain = 'vpc' if in_vpc else None
reuse_existing_ip_allowed = module.params.get('reuse_existing_ip_allowed')
release_on_disassociation = module.params.get('release_on_disassociation')
if instance_id:
warnings = ["instance_id is no longer used, please use device_id going forward"]
is_instance = True
device_id = instance_id
else:
if device_id and device_id.startswith('i-'):
is_instance = True
elif device_id:
is_instance = False
try:
if device_id:
address = find_address(ec2, public_ip, device_id, isinstance=is_instance)
else:
address = False
if state == 'present':
if device_id:
result = ensure_present(ec2, module, domain, address, device_id,
reuse_existing_ip_allowed,
module.check_mode, isinstance=is_instance)
else:
address = allocate_address(ec2, domain, reuse_existing_ip_allowed)
result = {'changed': True, 'public_ip': address.public_ip, 'allocation_id': address.allocation_id}
else:
if device_id:
disassociated = ensure_absent(ec2, domain, address, device_id, module.check_mode, isinstance=is_instance)
if release_on_disassociation and disassociated['changed']:
released = release_address(ec2, address, module.check_mode)
result = {'changed': True, 'disassociated': disassociated, 'released': released}
else:
result = {'changed': disassociated['changed'], 'disassociated': disassociated, 'released': {'changed': False}}
else:
address = find_address(ec2, public_ip, None)
released = release_address(ec2, address, module.check_mode)
result = {'changed': released['changed'], 'disassociated': {'changed': False}, 'released': released}
except (boto.exception.EC2ResponseError, EIPException) as e:
module.fail_json(msg=str(e))
if instance_id:
result['warnings'] = warnings
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import * # noqa
from ansible.module_utils.ec2 import * # noqa
if __name__ == '__main__':
main()
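# --- Hedged illustration (not part of the module) ----------------------------
# The helper functions above can also be exercised directly against a boto EC2
# connection; the region, credentials and instance id below are assumptions.
#
#   import boto.ec2
#   conn = boto.ec2.connect_to_region('us-west-2')
#   addr = allocate_address(conn, domain='vpc', reuse_existing_ip_allowed=False)
#   associate_ip_and_device(conn, addr, 'i-1212f003', check_mode=False)
#   disassociate_ip_and_device(conn, addr, 'i-1212f003', check_mode=False)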
| 37.48072
| 130
| 0.663855
|
8b033c0a92091980fdf310b06f7da9555123d0bf
| 591
|
py
|
Python
|
PE052-Permuted multiples/PE052.py
|
christosg88/ProjectEuler
|
6fe131b03b0f81b12a219d5c7d6fe17d9a91eeee
|
[
"MIT"
] | null | null | null |
PE052-Permuted multiples/PE052.py
|
christosg88/ProjectEuler
|
6fe131b03b0f81b12a219d5c7d6fe17d9a91eeee
|
[
"MIT"
] | null | null | null |
PE052-Permuted multiples/PE052.py
|
christosg88/ProjectEuler
|
6fe131b03b0f81b12a219d5c7d6fe17d9a91eeee
|
[
"MIT"
] | null | null | null |
# It can be seen that the number, 125874, and its double, 251748, contain
# exactly the same digits, but in a different order.
# Find the smallest positive integer, x, such that 2x, 3x, 4x, 5x, and 6x,
# contain the same digits.
def FreqOfDigits(number):
digits = [0 for x in xrange(10)]
for digit in list(str(number)):
digits[int(digit)] += 1
return digits
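# For example (illustrative): FreqOfDigits(125874) == FreqOfDigits(251748),
# since both numbers contain exactly the digits 1, 2, 4, 5, 7 and 8 once each.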
n = 1
flag = True
while flag:
k = 2
while k <= 6:
if FreqOfDigits(n) != FreqOfDigits(k*n):
break
k += 1
if k == 7:
print n
break
n += 1
# 142857
| 24.625
| 74
| 0.585448
|
26d7221d9fb489b32c0be5b75d5a7e25893c5d77
| 451
|
py
|
Python
|
benchmark/python/mailpy.py
|
hywel1994/SARosPerceptionKitti
|
82c307facb5b39e47c510fbdb132962cebf09d2e
|
[
"MIT"
] | 232
|
2020-08-26T10:16:10.000Z
|
2022-03-26T08:39:44.000Z
|
benchmark/python/mailpy.py
|
hywel1994/SARosPerceptionKitti
|
82c307facb5b39e47c510fbdb132962cebf09d2e
|
[
"MIT"
] | 17
|
2018-10-06T14:40:26.000Z
|
2022-03-27T07:50:03.000Z
|
benchmark/python/mailpy.py
|
hywel1994/SARosPerceptionKitti
|
82c307facb5b39e47c510fbdb132962cebf09d2e
|
[
"MIT"
] | 74
|
2018-10-09T19:09:54.000Z
|
2022-03-27T12:38:14.000Z
|
class Mail:
""" Dummy class to print messages without sending e-mails"""
def __init__(self,mailaddress):
pass
def msg(self,msg):
print msg
def finalize(self,success,benchmark,sha_key,mailaddress=None):
if success:
print "Results for %s (benchmark: %s) sucessfully created" % (benchmark,sha_key)
else:
print "Creating results for %s (benchmark: %s) failed" % (benchmark,sha_key)
| 34.692308
| 92
| 0.636364
|
e8fcf3df5412004ab7a8f50fae05fb94d04f8211
| 54
|
py
|
Python
|
cms/__init__.py
|
datakortet/django-cms
|
81f854cff8b23a51a98eee68528526fa854521ed
|
[
"BSD-3-Clause"
] | null | null | null |
cms/__init__.py
|
datakortet/django-cms
|
81f854cff8b23a51a98eee68528526fa854521ed
|
[
"BSD-3-Clause"
] | null | null | null |
cms/__init__.py
|
datakortet/django-cms
|
81f854cff8b23a51a98eee68528526fa854521ed
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
__version__ = '2.4.3-support'
| 18
| 29
| 0.574074
|
406625dcad8f240e602e60c77d67f6a542bce980
| 20,701
|
py
|
Python
|
src/tests/plugins/test_sendmail.py
|
NicsTr/pretix
|
e6d2380d9ed1836cc64a688b2be20d00a8500eab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/plugins/test_sendmail.py
|
NicsTr/pretix
|
e6d2380d9ed1836cc64a688b2be20d00a8500eab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/plugins/test_sendmail.py
|
NicsTr/pretix
|
e6d2380d9ed1836cc64a688b2be20d00a8500eab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import datetime
import pytest
from django.core import mail as djmail
from django.utils.timezone import now
from django_scopes import scopes_disabled
from pretix.base.models import (
Checkin, Event, Item, Order, OrderPosition, Organizer, Team, User,
)
@pytest.fixture
def event():
"""Returns an event instance"""
o = Organizer.objects.create(name='Dummy', slug='dummy')
event = Event.objects.create(
organizer=o, name='Dummy', slug='dummy',
date_from=now(),
plugins='pretix.plugins.sendmail,tests.testdummy',
)
return event
@pytest.fixture
def item(event):
"""Returns an item instance"""
return Item.objects.create(name='Test item', event=event, default_price=13)
@pytest.fixture
def checkin_list(event):
"""Returns an checkin list instance"""
return event.checkin_lists.create(name="Test Checkinlist", all_products=True)
@pytest.fixture
def order(item):
"""Returns an order instance"""
o = Order.objects.create(event=item.event, status=Order.STATUS_PENDING,
expires=now() + datetime.timedelta(hours=1),
total=13, code='DUMMY', email='dummy@dummy.test',
datetime=now(), locale='en')
return o
@pytest.fixture
def pos(order, item):
return OrderPosition.objects.create(order=order, item=item, price=13)
@pytest.fixture
def logged_in_client(client, event):
"""Returns a logged client"""
user = User.objects.create_superuser('dummy@dummy.dummy', 'dummy')
t = Team.objects.create(organizer=event.organizer, can_view_orders=True, can_change_orders=True)
t.members.add(user)
t.limit_events.add(event)
client.force_login(user)
return client
@pytest.fixture
def sendmail_url(event):
"""Returns a url for sendmail"""
url = '/control/event/{orga}/{event}/sendmail/'.format(
event=event.slug, orga=event.organizer.slug,
)
return url
@pytest.mark.django_db
def test_sendmail_view(logged_in_client, sendmail_url, expected=200):
response = logged_in_client.get(sendmail_url)
assert response.status_code == expected
@pytest.mark.django_db
def test_sendmail_simple_case(logged_in_client, sendmail_url, event, order, pos):
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == [order.email]
assert djmail.outbox[0].subject == 'Test subject'
assert 'This is a test file for sending mails.' in djmail.outbox[0].body
url = sendmail_url + 'history/'
response = logged_in_client.get(url)
assert response.status_code == 200
assert 'Test subject' in response.rendered_content
@pytest.mark.django_db
def test_sendmail_email_not_sent_if_order_not_match(logged_in_client, sendmail_url, event, order, pos):
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'p',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert 'alert-danger' in response.rendered_content
assert len(djmail.outbox) == 0
@pytest.mark.django_db
def test_sendmail_preview(logged_in_client, sendmail_url, event, order, pos):
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'action': 'preview',
},
follow=True)
assert response.status_code == 200
assert 'E-mail preview' in response.rendered_content
assert len(djmail.outbox) == 0
@pytest.mark.django_db
def test_sendmail_invalid_data(logged_in_client, sendmail_url, event, order, pos):
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': 'Test subject',
},
follow=True)
assert 'has-error' in response.rendered_content
assert len(djmail.outbox) == 0
@pytest.mark.django_db
def test_sendmail_multi_locales(logged_in_client, sendmail_url, event, item):
djmail.outbox = []
event.settings.set('locales', ['en', 'de'])
with scopes_disabled():
o = Order.objects.create(event=item.event, status=Order.STATUS_PAID,
expires=now() + datetime.timedelta(hours=1),
total=13, code='DUMMY', email='dummy@dummy.test',
datetime=now(),
locale='de')
OrderPosition.objects.create(order=o, item=item, price=13)
response = logged_in_client.post(sendmail_url,
{'sendto': 'p',
'recipients': 'orders',
'items': item.pk,
'subject_0': 'Test subject',
'message_0': 'Test message',
'subject_1': 'Benutzer',
'message_1': 'Test nachricht',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == [o.email]
assert djmail.outbox[0].subject == 'Benutzer'
assert 'Test nachricht' in djmail.outbox[0].body
url = sendmail_url + 'history/'
response = logged_in_client.get(url)
assert response.status_code == 200
assert 'Benutzer' in response.rendered_content
assert 'Test nachricht' in response.rendered_content
@pytest.mark.django_db
def test_sendmail_subevents(logged_in_client, sendmail_url, event, order, pos):
event.has_subevents = True
event.save()
with scopes_disabled():
se1 = event.subevents.create(name='Subevent FOO', date_from=now())
se2 = event.subevents.create(name='Bar', date_from=now())
op = order.positions.last()
op.subevent = se1
op.save()
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'subevent': se1.pk,
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'subevent': se2.pk,
},
follow=True)
assert len(djmail.outbox) == 0
url = sendmail_url + 'history/'
response = logged_in_client.get(url)
assert response.status_code == 200
assert 'Subevent FOO' in response.rendered_content
@pytest.mark.django_db
def test_sendmail_placeholder(logged_in_client, sendmail_url, event, order, pos):
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'orders',
'items': pos.item_id,
'subject_0': '{code} Test subject',
'message_0': 'This is a test file for sending mails.',
'action': 'preview',
},
follow=True)
assert response.status_code == 200
assert 'F8VVL' in response.rendered_content
assert len(djmail.outbox) == 0
@pytest.mark.django_db
def test_sendmail_attendee_mails(logged_in_client, sendmail_url, event, order, pos):
p = pos
event.settings.attendee_emails_asked = True
p.attendee_email = 'attendee@dummy.test'
p.save()
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == ['attendee@dummy.test']
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
@pytest.mark.django_db
def test_sendmail_both_mails(logged_in_client, sendmail_url, event, order, pos):
p = pos
event.settings.attendee_emails_asked = True
p.attendee_email = 'attendee@dummy.test'
p.save()
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'both',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 2
assert djmail.outbox[0].to == ['attendee@dummy.test']
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
assert djmail.outbox[1].to == ['dummy@dummy.test']
assert '/ticket/' not in djmail.outbox[1].body
assert '/order/' in djmail.outbox[1].body
@pytest.mark.django_db
def test_sendmail_both_but_same_address(logged_in_client, sendmail_url, event, order, pos):
p = pos
event.settings.attendee_emails_asked = True
p.attendee_email = 'dummy@dummy.test'
p.save()
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'both',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == ['dummy@dummy.test']
assert '/ticket/' not in djmail.outbox[0].body
assert '/order/' in djmail.outbox[0].body
@pytest.mark.django_db
def test_sendmail_attendee_fallback(logged_in_client, sendmail_url, event, order, pos):
p = pos
event.settings.attendee_emails_asked = True
p.attendee_email = None
p.save()
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == ['dummy@dummy.test']
assert '/ticket/' not in djmail.outbox[0].body
assert '/order/' in djmail.outbox[0].body
@pytest.mark.django_db
def test_sendmail_attendee_product_filter(logged_in_client, sendmail_url, event, order, pos):
event.settings.attendee_emails_asked = True
with scopes_disabled():
i2 = Item.objects.create(name='Test item', event=event, default_price=13)
p = pos
p.attendee_email = 'attendee1@dummy.test'
p.save()
order.positions.create(
item=i2, price=0, attendee_email='attendee2@dummy.test'
)
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': i2.pk,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == ['attendee2@dummy.test']
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
@pytest.mark.django_db
def test_sendmail_attendee_checkin_filter(logged_in_client, sendmail_url, event, order, checkin_list, item, pos):
event.settings.attendee_emails_asked = True
with scopes_disabled():
chkl2 = event.checkin_lists.create(name="Test Checkinlist 2", all_products=True)
p = pos
p.attendee_email = 'attendee1@dummy.test'
p.save()
pos2 = order.positions.create(item=item, price=0, attendee_email='attendee2@dummy.test')
Checkin.objects.create(position=pos2, list=chkl2)
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos2.item_id,
'filter_checkins': 'on',
'checkin_lists': [chkl2.id],
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.'
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == ['attendee2@dummy.test']
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos2.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'filter_checkins': 'on',
'not_checked_in': 'on',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 1
assert djmail.outbox[0].to == ['attendee1@dummy.test']
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos2.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'filter_checkins': 'on',
'checkin_lists': [chkl2.id],
'not_checked_in': 'on',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 2
assert djmail.outbox[0].to == ['attendee1@dummy.test']
assert djmail.outbox[1].to == ['attendee2@dummy.test']
# Test that filtering is ignored if filter_checkins is not set
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos2.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'not_checked_in': 'on',
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 2
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
assert '/ticket/' in djmail.outbox[1].body
assert '/order/' not in djmail.outbox[1].body
to_emails = set(*zip(*[mail.to for mail in djmail.outbox]))
assert to_emails == {'attendee1@dummy.test', 'attendee2@dummy.test'}
# Test that filtering is ignored if filter_checkins is not set
djmail.outbox = []
response = logged_in_client.post(sendmail_url,
{'sendto': 'na',
'recipients': 'attendees',
'items': pos2.item_id,
'subject_0': 'Test subject',
'message_0': 'This is a test file for sending mails.',
'checkin_lists': [chkl2.id],
},
follow=True)
assert response.status_code == 200
assert 'alert-success' in response.rendered_content
assert len(djmail.outbox) == 2
assert '/ticket/' in djmail.outbox[0].body
assert '/order/' not in djmail.outbox[0].body
assert '/ticket/' in djmail.outbox[1].body
assert '/order/' not in djmail.outbox[1].body
to_emails = set(*zip(*[mail.to for mail in djmail.outbox]))
assert to_emails == {'attendee1@dummy.test', 'attendee2@dummy.test'}
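# Hedged note (not part of the original suite): these tests are ordinarily
# collected by pytest together with pretix's Django test settings, e.g.
#   pytest src/tests/plugins/test_sendmail.py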
| 42.075203
| 113
| 0.512391
|
393c52380f5eefe326dddb110f15804f9549c7ff
| 5,385
|
py
|
Python
|
argocd_client/models/v1alpha1_sync_operation_resource.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 1
|
2021-09-29T11:57:07.000Z
|
2021-09-29T11:57:07.000Z
|
argocd_client/models/v1alpha1_sync_operation_resource.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 1
|
2020-09-09T00:28:57.000Z
|
2020-09-09T00:28:57.000Z
|
argocd_client/models/v1alpha1_sync_operation_resource.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 2
|
2020-10-13T18:31:59.000Z
|
2021-02-15T12:52:33.000Z
|
# coding: utf-8
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from argocd_client.configuration import Configuration
class V1alpha1SyncOperationResource(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'group': 'str',
'kind': 'str',
'name': 'str',
'namespace': 'str'
}
attribute_map = {
'group': 'group',
'kind': 'kind',
'name': 'name',
'namespace': 'namespace'
}
def __init__(self, group=None, kind=None, name=None, namespace=None, local_vars_configuration=None): # noqa: E501
"""V1alpha1SyncOperationResource - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._group = None
self._kind = None
self._name = None
self._namespace = None
self.discriminator = None
if group is not None:
self.group = group
if kind is not None:
self.kind = kind
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
@property
def group(self):
"""Gets the group of this V1alpha1SyncOperationResource. # noqa: E501
:return: The group of this V1alpha1SyncOperationResource. # noqa: E501
:rtype: str
"""
return self._group
@group.setter
def group(self, group):
"""Sets the group of this V1alpha1SyncOperationResource.
:param group: The group of this V1alpha1SyncOperationResource. # noqa: E501
:type: str
"""
self._group = group
@property
def kind(self):
"""Gets the kind of this V1alpha1SyncOperationResource. # noqa: E501
:return: The kind of this V1alpha1SyncOperationResource. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1alpha1SyncOperationResource.
:param kind: The kind of this V1alpha1SyncOperationResource. # noqa: E501
:type: str
"""
self._kind = kind
@property
def name(self):
"""Gets the name of this V1alpha1SyncOperationResource. # noqa: E501
:return: The name of this V1alpha1SyncOperationResource. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1alpha1SyncOperationResource.
:param name: The name of this V1alpha1SyncOperationResource. # noqa: E501
:type: str
"""
self._name = name
@property
def namespace(self):
"""Gets the namespace of this V1alpha1SyncOperationResource. # noqa: E501
:return: The namespace of this V1alpha1SyncOperationResource. # noqa: E501
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""Sets the namespace of this V1alpha1SyncOperationResource.
:param namespace: The namespace of this V1alpha1SyncOperationResource. # noqa: E501
:type: str
"""
self._namespace = namespace
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1alpha1SyncOperationResource):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1alpha1SyncOperationResource):
return True
return self.to_dict() != other.to_dict()
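# --- Hedged usage sketch (not part of the generated client) ------------------
# A minimal illustration of constructing and serializing this auto-generated
# model; the field values below are made-up assumptions.
if __name__ == "__main__":
    example = V1alpha1SyncOperationResource(
        group="apps", kind="Deployment", name="guestbook-ui", namespace="default")
    print(example.to_dict())  # {'group': 'apps', 'kind': 'Deployment', ...}
    # Round-tripping through to_dict() yields an equal model instance.
    print(example == V1alpha1SyncOperationResource(**example.to_dict()))  # True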
| 27.060302
| 118
| 0.584401
|
cc3ba182d105330aa6cc95d9962cfb18b2736f87
| 263
|
py
|
Python
|
tests/artificial/transf_Fisher/trend_LinearTrend/cycle_5/ar_12/test_artificial_128_Fisher_LinearTrend_5_12_0.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/artificial/transf_Fisher/trend_LinearTrend/cycle_5/ar_12/test_artificial_128_Fisher_LinearTrend_5_12_0.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/artificial/transf_Fisher/trend_LinearTrend/cycle_5/ar_12/test_artificial_128_Fisher_LinearTrend_5_12_0.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 5, transform = "Fisher", sigma = 0.0, exog_count = 0, ar_order = 12);
| 37.571429
| 163
| 0.730038
|
daff3f7f30c349ef6741a7bdb435e028930b5dde
| 3,538
|
py
|
Python
|
setup.py
|
cmccully/imageutils
|
3eafeb62cba5c3da44c04f2aaaf88fb4b84a8f5e
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
cmccully/imageutils
|
3eafeb62cba5c3da44c04f2aaaf88fb4b84a8f5e
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
cmccully/imageutils
|
3eafeb62cba5c3da44c04f2aaaf88fb4b84a8f5e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import glob
import os
import sys
import ah_bootstrap
from setuptools import setup
# A dirty hack to get around some early import/configuration ambiguities
if sys.version_info[0] >= 3:
import builtins
else:
import __builtin__ as builtins
builtins._ASTROPY_SETUP_ = True
from astropy_helpers.setup_helpers import (
register_commands, adjust_compiler, get_debug_option, get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py
# Get some values from the setup.cfg
from distutils import config
conf = config.ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))
PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')
# Get the long description from the package's docstring
__import__(PACKAGENAME)
package = sys.modules[PACKAGENAME]
LONG_DESCRIPTION = package.__doc__
# Store the package name in a built-in variable so it's easy
# to get from other parts of the setup infrastructure
builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME
# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
VERSION = '0.0.dev'
# Indicates if this version is a release version
RELEASE = 'dev' not in VERSION
if not RELEASE:
VERSION += get_git_devstr(False)
# Populate the dict of setup command overrides; this should be done before
# invoking any other functionality from distutils since it can potentially
# modify distutils' behavior.
cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE)
# Adjust the compiler in case the default on this platform is to use a
# broken one.
adjust_compiler(PACKAGENAME)
# Freeze build information in version.py
generate_version_py(PACKAGENAME, VERSION, RELEASE,
get_debug_option(PACKAGENAME))
# Treat everything in scripts except README.rst as a script to be installed
scripts = [fname for fname in glob.glob(os.path.join('scripts', '*'))
if os.path.basename(fname) != 'README.rst']
# Get configuration information from all of the various subpackages.
# See the docstring for setup_helpers.update_package_files for more
# details.
package_info = get_package_info()
# Add the project-global data
package_info['package_data'].setdefault(PACKAGENAME, [])
package_info['package_data'][PACKAGENAME].append('data/*')
# Include all .c files, recursively, including those generated by
# Cython, since we can not do this in MANIFEST.in with a "dynamic"
# directory name.
c_files = []
for root, dirs, files in os.walk(PACKAGENAME):
for filename in files:
if filename.endswith('.c'):
c_files.append(
os.path.join(
os.path.relpath(root, PACKAGENAME), filename))
package_info['package_data'][PACKAGENAME].extend(c_files)
setup(name=PACKAGENAME,
version=VERSION,
description=DESCRIPTION,
scripts=scripts,
requires=['astropy'],
install_requires=['astropy'],
provides=[PACKAGENAME],
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license=LICENSE,
url=URL,
long_description=LONG_DESCRIPTION,
cmdclass=cmdclassd,
zip_safe=False,
use_2to3=False,
**package_info
)
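# Hedged note (not part of the original script): with ah_bootstrap and
# astropy_helpers available, the package is typically built and installed with
# the standard commands, e.g.
#   python setup.py build
#   python setup.py install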
| 32.163636
| 79
| 0.742793
|
5aa8d1d4a9c2419efe0b454ea03540951bb6dd5b
| 213
|
py
|
Python
|
openCV_projects/learn/matplot_imgshower.py
|
henryz2004/OpenCV_Experiments
|
b33c4bf65ef302613ec902b680441d469423cdb3
|
[
"MIT"
] | null | null | null |
openCV_projects/learn/matplot_imgshower.py
|
henryz2004/OpenCV_Experiments
|
b33c4bf65ef302613ec902b680441d469423cdb3
|
[
"MIT"
] | null | null | null |
openCV_projects/learn/matplot_imgshower.py
|
henryz2004/OpenCV_Experiments
|
b33c4bf65ef302613ec902b680441d469423cdb3
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2
from matplotlib import pyplot as plt
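# Note (added comment): cv2.imread returns pixel data in BGR channel order; the
# [..., ::-1] slice below reverses the channel axis to RGB so matplotlib
# displays the colours correctly.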
image = cv2.imread("personsmall.jpg", 1)[..., ::-1]
plt.imshow(image, interpolation='bicubic')
plt.xticks([]), plt.yticks([])
plt.show()
| 23.666667
| 52
| 0.676056
|
ad761dfb69de8225bd832a13b91636524268ffdd
| 2,027
|
py
|
Python
|
chrome/browser/resources/PRESUBMIT.py
|
junmin-zhu/chromium-rivertrail
|
eb1a57aca71fe68d96e48af8998dcfbe45171ee1
|
[
"BSD-3-Clause"
] | 7
|
2015-05-20T22:41:35.000Z
|
2021-11-18T19:07:59.000Z
|
chrome/browser/resources/PRESUBMIT.py
|
quisquous/chromium
|
b25660e05cddc9d0c3053b3514f07037acc69a10
|
[
"BSD-3-Clause"
] | 1
|
2015-02-02T06:55:08.000Z
|
2016-01-20T06:11:59.000Z
|
chrome/browser/resources/PRESUBMIT.py
|
jianglong0156/chromium.src
|
d496dfeebb0f282468827654c2b3769b3378c087
|
[
"BSD-3-Clause"
] | 6
|
2016-11-14T10:13:35.000Z
|
2021-01-23T15:29:53.000Z
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Chromium WebUI resources.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl/git cl, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
def CheckChangeOnUpload(input_api, output_api):
return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _CommonChecks(input_api, output_api)
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
resources = input_api.PresubmitLocalPath()
path = input_api.os_path
affected_files = (f.AbsoluteLocalPath() for f in input_api.AffectedFiles())
would_affect_tests = (
path.join(resources, 'PRESUBMIT.py'),
path.join(resources, 'test_presubmit.py'),
path.join(resources, 'web_dev_style', 'css_checker.py'),
path.join(resources, 'web_dev_style', 'js_checker.py'),
)
if any(f for f in affected_files if f in would_affect_tests):
tests = [path.join(resources, 'test_presubmit.py')]
results.extend(
input_api.canned_checks.RunUnitTests(input_api, output_api, tests))
import sys
old_path = sys.path
try:
sys.path = [resources] + old_path
from web_dev_style import css_checker, js_checker
def is_resource(maybe_resource):
f = maybe_resource.AbsoluteLocalPath()
return f.endswith(('.css', '.html', '.js')) and f.startswith(resources)
results.extend(css_checker.CSSChecker(input_api, output_api,
file_filter=is_resource).RunChecks())
results.extend(js_checker.JSChecker(input_api, output_api,
file_filter=is_resource).RunChecks())
finally:
sys.path = old_path
return results
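# Hedged note (not part of the original script): depot_tools invokes these
# checks automatically on upload/commit; they can also be exercised locally
# from a checkout with, e.g.
#   git cl presubmit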
| 34.355932
| 79
| 0.71337
|
b67d220f7d79c9d6c4ca405b4c335d96ac2be3c9
| 6,228
|
py
|
Python
|
iWear/resources/names.py
|
AmitAharoni/iWear2021
|
8e792d8dc1fed733fa1f77b7f4720393970a83d5
|
[
"MIT"
] | 7
|
2021-03-08T18:15:44.000Z
|
2021-05-24T21:04:24.000Z
|
users/recources/names.py
|
AmitAharoni/roo.me
|
eb93ed99ab8635543a5e9489893e6718344ddfd9
|
[
"MIT"
] | 232
|
2021-03-08T15:35:56.000Z
|
2021-06-06T21:33:10.000Z
|
users/recources/names.py
|
AmitAharoni/roo.me
|
eb93ed99ab8635543a5e9489893e6718344ddfd9
|
[
"MIT"
] | 7
|
2021-03-08T13:11:46.000Z
|
2021-03-22T06:58:39.000Z
|
FIRST_NAME_LIST = [
"Aaron", "Abbey", "Abbie", "Jen", "Jena", "Jenae", "Jeniffer", "Jenine", "Jenise",
"Jenna", "Jennefer", "Jennell", "Jennette", "Jenni", "Jennie", "Jennifer",
"Jenniffer", "Jennine", "Jenny", "Jerald", "Jeraldine", "Jeramy", "Jere",
"Jeremiah", "Jeremy", "Jeri", "Jerica", "Jerilyn", "Jerlene", "Jermaine",
"Jerold", "Jerome", "Jeromy", "Jerrell", "Jerri", "Jerrica", "Jerrie", "Jerrod",
"Jerrold", "Jerry", "Jesenia", "Jesica", "Jess", "Jesse", "Jessenia", "Jessi",
"Jessia", "Jessica", "Jessie", "Jessika", "Jestine", "Jesus", "Jesusa", "Jesusita",
"Jetta", "Jettie", "Jewel", "Jewell", "Ji", "Jill", "Jillian", "Jim", "Jimmie",
"Jimmy", "Jin", "Jina", "Jinny", "Jo", "Joan", "Joana", "Joane", "Joanie", "Joann",
"Joanna", "Joanne", "Joannie", "Joaquin", "Joaquina", "Jocelyn", "Jodee", "Jodi",
"Jodie", "Jody", "Joe", "Joeann", "Joel", "Joella", "Joelle", "Joellen", "Joesph",
"Joetta", "Joette", "Joey", "Johana", "Johanna", "Johanne", "John", "Johna", "Johnathan",
"Johnathon", "Johnetta", "Johnette", "Johnie", "Johnna", "Johnnie", "Johnny",
"Johnsie", "Johnson", "Joi", "Joie", "Jolanda", "Joleen", "Jolene", "Jolie", "Joline",
"Jolyn", "Jolynn", "Jon", "Jona", "Jonah", "Jonas", "Jonathan", "Jonathon", "Jone",
"Jonell", "Jonelle", "Jong", "Joni", "Jonie", "Jonna", "Jonnie", "Jordan", "Jordon",
"Jorge", "Jose", "Josef", "Josefa", "Josefina", "Josefine", "Joselyn", "Joseph",
"Josephina", "Josephine", "Josette", "Josh", "Joshua", "Josiah", "Josie", "Joslyn",
"Jospeh", "Josphine", "Josue", "Jovan", "Jovita", "Joy", "Joya", "Joyce", "Joycelyn",
"Joye", "Juan", "Juana", "Juanita", "Jude", "Judi", "Judie", "Judith", "Judson", "Judy",
"Jule", "Julee", "Julene", "Jules", "Juli", "Julia", "Julian", "Juliana", "Juliane",
"Juliann", "Julianna", "Julianne", "Julie", "Julieann", "Julienne", "Juliet", "Julieta",
"Julietta", "Juliette", "Julio", "Julissa", "Julius", "June", "Jung", "Junie", "Junior",
"Junita", "Junko", "Justa", "Justin", "Justina", "Justine", "Jutta", "Ka", "Kacey", "Kaci",
"Kacie", "Kacy", "Kai", "Kaila", "Kaitlin", "Kaitlyn", "Kala", "Kaleigh", "Kaley", "Kali",
"Kallie", "Kalyn", "Kam", "Kamala", "Kami", "Kamilah", "Kandace", "Kandi", "Kandice",
"Kandis", "Kandra", "Kandy", "Kanesha", "Kanisha", "Kara", "Karan", "Kareem", "Kareen",
"Karen", "Karena", "Karey", "Kari", "Karie", "Karima", "Karin", "Karina", "Karine",
"Karisa", "Karissa", "Karl", "Karla", "Karleen", "Karlene", "Karly", "Karlyn", "Karma",
"Karmen", "Karol", "Karole", "Karoline", "Karolyn", "Karon", "Karren", "Karri", "Karrie",
"Karry", "Kary", "Karyl", "Karyn", "Kasandra", "Kasey", "Kasha", "Kasi", "Kasie",
"Kassandra", "Kassie", "Kate", "Katelin", "Katelyn", "Katelynn", "Katerine", "Kathaleen",
"Katharina", "Katharine", "Katharyn", "Kathe", "Katheleen", "Katherin", "Katherina",
"Katherine", "Kathern", "Katheryn", "Kathey", "Kathi", "Kathie", "Kathleen", "Kathlene",
"Kathline", "Kathlyn", "Kathrin", "Kathrine", "Kathryn", "Kathryne", "Kathy", "Kathyrn",
"Kati", "Katia", "Katie", "Katina", "Katlyn", "Katrice", "Katrina", "Kattie", "Katy", "Kay",
"Kayce", "Kaycee", "Kaye", "Kayla", "Kaylee", "Kayleen", "Kayleigh", "Kaylene", "Kazuko",
"Kecia", "Keeley", "Keely", "Keena", "Keenan", "Keesha", "Keiko", "Keila", "Keira", "Keisha",
"Keith", "Keitha", "Keli", "Kelle", "Kellee", "Kelley", "Kelli", "Kellie", "Kelly", "Kellye",
"Kelsey", "Kelsi", "Kelsie", "Kelvin", "Kemberly", "Ken", "Kena", "Kenda", "Kendal",
"Kendall", "Kendra", "Kendrick", "Keneth", "Kenia", "Kenisha", "Kenna", "Kenneth",
"Kennith", "Kenny", "Kent", "Kenton", "Kenya", "Kenyatta", "Kenyetta", "Kera", "Keren",
"Keri", "Kermit", "Kerri", "Kerrie", "Kerry", "Kerstin", "Kesha", "Keshia", "Keturah",
"Keva", "Keven", "Kimberley", "Kimberli", "Kimberlie", "Kimberly", "Kimbery", "Lai",
"Laila", "Laine", "Lajuana", "Lakeesha", "Lakeisha", "Lakendra", "Lakenya", "Lakesha",
"Lakeshia", "Lakia", "Lakiesha", "Lakisha", "Lakita", "Lala", "Lamar", "Lamonica", "Lamont",
"Lan", "Lana", "Lance", "Landon", "Lane", "Lanell"
]
LAST_NAME_LIST = [
"Aanerud", "Aarant", "Aardema", "Aarestad", "Aarhus", "Aaron", "Aarons", "Aaronson", "Aarsvold",
"Aas", "Aasby", "Aase", "Aasen", "Aavang", "Abad", "Abadi", "Ballew", "Balley", "Ballez",
"Balleza", "Balli", "Balliet", "Balliett", "Balliew", "Ballif", "Ballin", "Ballina", "Balling",
"Ballinger", "Ballintyn", "Ballman", "Ballmann", "Ballmer", "Ballog", "Ballon", "Balloon",
"Ballou", "Ballow", "Casado", "Casados", "Casady", "Casagranda", "Casagrande", "Casal",
"Casale", "Casalenda", "Casales", "Casali", "Casaliggi", "Casalman", "Casamayor", "Casamento",
"Casana", "Casanas", "Casano", "Casanova", "Casar", "Casarella", "Casareno", "Casares", "Casarez",
"Casario", "Casarrubias", "Delosh", "Delosier", "Delosreyes", "Delosrios", "Delossanto",
"Delossantos", "Delouise", "Deloy", "Deloye", "Delozier", "Delp", "Delpaggio", "Delpapa",
"Delperdang", "Delph", "Delphia", "Delpiano", "Delpino", "Delpit", "Delpozo", "Delprete",
"Delprincipe", "Delpriore", "Delre", "Delreal", "Embrey", "Embry", "Embs", "Embt", "Embury",
"Emch", "Emde", "Emdee", "Emeche", "Emel", "Emenaha", "Emerald", "Emerich", "Emerick", "Emerling",
"Emerson", "Emert", "Emerton", "Emeru", "Emery", "Emfield", "Emfinger", "Emge", "Emhoff",
"Emick", "Figueras", "Figuerda", "Figueredo", "Figueroa", "Figueron", "Figura", "Figurelli",
"Figures", "Figuroa", "Figurski", "Fijal", "Fijalkowski", "Fike", "Fikes", "Fil", "Fila",
"Filak", "Filan", "Filarecki", "Filary", "Filas", "Filbert", "Gannaway", "Ganner", "Gannett",
"Gannetti", "Gannoe", "Gannon", "Ganns", "Gano", "Ganoe", "Ganong", "Ganotisi", "Ganoung",
"Gans", "Gansburg", "Gansen", "Ganser", "Gansert", "Ganska", "Ganske", "Gant", "Ganter",
"Gantert", "Hornbarger", "Hornbeak", "Hornbeck", "Hornberg", "Hornberger", "Hornbrook",
"Hornbuckle", "Hornby", "Horne", "Horneff", "Horner", "Hornes", "Horney", "Horniak", "Hornick",
"Hornig", "Hornik", "Horning", "Hornish", "Hornlein", "Hornoff", "Hornor"
]
| 84.162162
| 102
| 0.579801
|
068ecd1b0328defd3e60695fb3f156b279bc234f
| 2,473
|
py
|
Python
|
blstm_att/utils_for_clinic.py
|
JoinAJR/blstm_att
|
4c4b598da900c7ee3ac5aa94a4017df1ffe756b7
|
[
"Apache-2.0"
] | null | null | null |
blstm_att/utils_for_clinic.py
|
JoinAJR/blstm_att
|
4c4b598da900c7ee3ac5aa94a4017df1ffe756b7
|
[
"Apache-2.0"
] | null | null | null |
blstm_att/utils_for_clinic.py
|
JoinAJR/blstm_att
|
4c4b598da900c7ee3ac5aa94a4017df1ffe756b7
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
class2label_2 = {'1': 0,
'2': 1, '3': 2,
'4': 3,'5': 4,'6':5}
label2class_2 = {'0': 1,
'1': 2, '2': 3,
'3': 4,'4': 5,'5': 6}
class2label = {'before': 0,'after': 1,'simutaneous': 2,'includes': 3,'is_included': 4,'vague': 5}
label2class = {0: 'before',1: 'after', 2: 'simutaneous',3: 'includes', 4: 'is_included',5: 'vague'}
# class2label = {'Other': 0,
# 'Message-Topic(e1,e2)': 1, 'Message-Topic(e2,e1)': 2,
# 'Product-Producer(e1,e2)': 3, 'Product-Producer(e2,e1)': 4,
# 'Instrument-Agency(e1,e2)': 5, 'Instrument-Agency(e2,e1)': 6,
# 'Entity-Destination(e1,e2)': 7, 'Entity-Destination(e2,e1)': 8,
# 'Cause-Effect(e1,e2)': 9, 'Cause-Effect(e2,e1)': 10,
# 'Component-Whole(e1,e2)': 11, 'Component-Whole(e2,e1)': 12,
# 'Entity-Origin(e1,e2)': 13, 'Entity-Origin(e2,e1)': 14,
# 'Member-Collection(e1,e2)': 15, 'Member-Collection(e2,e1)': 16,
# 'Content-Container(e1,e2)': 17, 'Content-Container(e2,e1)': 18}
#
# label2class = {0: 'Other',
# 1: 'Message-Topic(e1,e2)', 2: 'Message-Topic(e2,e1)',
# 3: 'Product-Producer(e1,e2)', 4: 'Product-Producer(e2,e1)',
# 5: 'Instrument-Agency(e1,e2)', 6: 'Instrument-Agency(e2,e1)',
# 7: 'Entity-Destination(e1,e2)', 8: 'Entity-Destination(e2,e1)',
# 9: 'Cause-Effect(e1,e2)', 10: 'Cause-Effect(e2,e1)',
# 11: 'Component-Whole(e1,e2)', 12: 'Component-Whole(e2,e1)',
# 13: 'Entity-Origin(e1,e2)', 14: 'Entity-Origin(e2,e1)',
# 15: 'Member-Collection(e1,e2)', 16: 'Member-Collection(e2,e1)',
# 17: 'Content-Container(e1,e2)', 18: 'Content-Container(e2,e1)'}
def load_glove(embedding_path, embedding_dim, vocab):
    # initialize the embedding matrix with scaled random normal values
    initW = np.random.randn(len(vocab.vocabulary_), embedding_dim).astype(np.float32) / np.sqrt(len(vocab.vocabulary_))
    # overwrite rows with pretrained GloVe vectors for words that are in the vocabulary
print("Load glove file {0}".format(embedding_path))
f = open(embedding_path, 'r', encoding='utf8')
for line in f:
splitLine = line.split(' ')
word = splitLine[0]
embedding = np.asarray(splitLine[1:], dtype='float32')
idx = vocab.vocabulary_.get(word)
if idx != 0:
initW[idx] = embedding
return initW
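# --- Usage sketch (illustrative, not part of the original file) ---
# load_glove expects a vocabulary object exposing `vocabulary_` with len() and
# .get(word) -> index, where index 0 is reserved for unknown words (the
# interface of tf.contrib.learn's VocabularyProcessor). The stand-in classes
# and the GloVe file path below are assumptions made only to show that contract.
class _Vocabulary:
    def __init__(self, words):
        # index 0 stays reserved for out-of-vocabulary words
        self._index = {w: i + 1 for i, w in enumerate(words)}

    def __len__(self):
        return len(self._index) + 1

    def get(self, word):
        return self._index.get(word, 0)


class _VocabStandIn:
    def __init__(self, words):
        self.vocabulary_ = _Vocabulary(words)


if __name__ == "__main__":
    vocab = _VocabStandIn(["before", "after", "includes"])
    # "glove.6B.100d.txt" is a placeholder path; point it at a real GloVe file.
    embeddings = load_glove("glove.6B.100d.txt", 100, vocab)
    print(embeddings.shape)  # (len(vocab.vocabulary_), 100)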
| 47.557692
| 119
| 0.544278
|
a2f82a6e41232c4c3f9cfb6ecb55298ec0da225b
| 3,207
|
py
|
Python
|
myCatkin/src/joystick_drivers/wiimote/nodes/feedbackTester.py
|
sbow/scratch
|
8ac5cd772c8f6c3def6d25ad0402c3f973af2fae
|
[
"MIT"
] | 3
|
2021-01-10T10:52:14.000Z
|
2021-12-31T10:19:25.000Z
|
src/joystick_drivers/wiimote/nodes/feedbackTester.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | null | null | null |
src/joystick_drivers/wiimote/nodes/feedbackTester.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | 1
|
2021-02-04T04:59:32.000Z
|
2021-02-04T04:59:32.000Z
|
#!/usr/bin/env python
import roslib
import rospy
from sensor_msgs.msg import JoyFeedbackArray
from sensor_msgs.msg import JoyFeedback
roslib.load_manifest('wiimote')
INTER_PATTERN_SLEEP_DURATION = 0.2
def talker():
pub = rospy.Publisher('/joy/set_feedback', JoyFeedbackArray, queue_size=1)
rospy.init_node('ledControlTester', anonymous=True)
led0 = JoyFeedback()
led0.type = JoyFeedback.TYPE_LED
led0.id = 0
led1 = JoyFeedback()
led1.type = JoyFeedback.TYPE_LED
led1.id = 1
led2 = JoyFeedback()
led2.type = JoyFeedback.TYPE_LED
led2.id = 2
led3 = JoyFeedback()
led3.type = JoyFeedback.TYPE_LED
led3.id = 3
rum = JoyFeedback()
rum.type = JoyFeedback.TYPE_RUMBLE
rum.id = 0
while not rospy.is_shutdown():
msg = JoyFeedbackArray()
msg.array = [led0, led1, led2, led3, rum]
led0.intensity = 0.2
led3.intensity = 0.2
rum.intensity = 0.49
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
led0.intensity = 1.0
rum.intensity = 0.51
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
led0.intensity = 0.0
led1.intensity = 1.0
rum.intensity = 0.0
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
led1.intensity = 0.0
led2.intensity = 1.0
rum.intensity = 0.7
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
led2.intensity = 0.0
led3.intensity = 1.0
rum.intensity = 0.49
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
led1.intensity = 1.0
led2.intensity = 1.0
rum.intensity = 1.0
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
led0.intensity = 1.0
led1.intensity = 0.4
led2.intensity = 0.4
msg.array = [led0, led1, led2]
if msg is not None:
rospy.logdebug("Msg: " + str(msg))
pub.publish(msg)
rospy.sleep(INTER_PATTERN_SLEEP_DURATION)
if __name__ == '__main__':
print("\n ****************************************************************\n")
print("**** You should see six LED on/off configurations, and feel Rumbles! ****")
print("\n **************************************************************")
print("[off, off, off, off]")
print("[on, off, off, off]")
print("[off, on, off, off]")
print("[off, off, on, off]")
print("[off, off, off, on ]")
print("[off, on, on, on ]")
print("[on, off, off, on ]")
try:
talker()
except rospy.ROSInterruptException:
pass
| 27.177966
| 91
| 0.549735
|
ec363e03f2908e9a72d76e53000773a56b488809
| 2,708
|
py
|
Python
|
machine-learning-ex6/ex6/ex6 in python.py
|
Louay-Ben-nessir/Assignments-in-octave-and-python
|
0e01f19a3f712dc1f293327eaf121d518cadcd29
|
[
"MIT"
] | 1
|
2021-10-01T20:59:44.000Z
|
2021-10-01T20:59:44.000Z
|
machine-learning-ex6/ex6/ex6 in python.py
|
Louay-Ben-nessir/Assignments-in-octave-and-python
|
0e01f19a3f712dc1f293327eaf121d518cadcd29
|
[
"MIT"
] | null | null | null |
machine-learning-ex6/ex6/ex6 in python.py
|
Louay-Ben-nessir/Assignments-in-octave-and-python
|
0e01f19a3f712dc1f293327eaf121d518cadcd29
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.io
import sklearn.svm  # import the svm submodule explicitly so sklearn.svm.SVC is available below
import matplotlib.pyplot as plt
def plot(X,y,model=False,clear=False):
plt.close()#clear any other plot
type_0=np.matrix( [(X[i,0],X[i,1]) for i in range(y.shape[0]) if not y[i]] )
type_1=np.matrix( [(X[i,0],X[i,1]) for i in range(y.shape[0]) if y[i]] ) #could replace with a set or np.unique?
plt.plot(type_0[:,0],type_0[:,1], 'yo')
plt.plot(type_1[:,0],type_1[:,1], 'kx')
    if isinstance(model, sklearn.svm.SVC):  # draw the decision boundary only when a fitted linear SVC is passed
l1 = np.array([min(X[:,0]),max(X[:,0])])
l2=-(model.coef_[0,0]*l1 +model.intercept_ )/model.coef_[0,1] #l2=(model.coef_[0,1] + model.coef_[0,0]*l1)
plt.plot(l1, l2, '-b')
def gaussianKernel(x1, x2, sigma):
temp=np.matrix(x1-x2)
return np.exp( (temp*temp.transpose() )/(-2*(sigma**2) ))
def gaussianKernelGramMatrix(X1, X2, sigma):  # Gram matrix of the Gaussian (RBF) kernel between the rows of X1 and X2
    gram_matrix = np.zeros((X1.shape[0], X2.shape[0]))
for i, x1 in enumerate(X1):
for j, x2 in enumerate(X2):
gram_matrix[i, j] = gaussianKernel(x1, x2, sigma)
return gram_matrix
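# --- Vectorized alternative (sketch, not part of the original assignment) ---
# The double loop above evaluates K(x1, x2) = exp(-||x1 - x2||^2 / (2 * sigma^2))
# entry by entry. The helper below computes the same Gram matrix in one shot; it
# assumes scipy is installed, which the rest of this script does not require, so
# the import is kept local to the function. It is never called by the script.
def gaussianKernelGramMatrixVectorized(X1, X2, sigma):
    from scipy.spatial.distance import cdist  # optional dependency
    sq_dists = cdist(np.asarray(X1), np.asarray(X2), 'sqeuclidean')  # pairwise squared distances
    return np.exp(-sq_dists / (2 * sigma ** 2))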
c = 100
Data=scipy.io.loadmat('ex6data1.mat')
X,y=Data['X'],np.matrix(Data['y']).A1
model = sklearn.svm.SVC(C=c, kernel="linear", tol=1e-3).fit(X,y)#,max_iter=100
plot(X,y,model)
plt.show()
sim=gaussianKernel(np.matrix([1,2,1]), np.matrix([0, 4, -1]) , 2) #0.324652
Data=scipy.io.loadmat('ex6data2.mat')
X,y=Data['X'],np.matrix(Data['y']).A1
c = 1
sigma=0.1
'''model = sklearn.svm.SVC(C = c, kernel="precomputed", tol=1e-3).fit( gaussianKernelGramMatrix(X,X,sigma) ,y)
plot(X,y)
x1plot = np.linspace(X[:,0].min(), X[:,0].max(), 100).T
x2plot = np.linspace(X[:,1].min(), X[:,1].max(), 100).T
X1, X2 = np.meshgrid(x1plot, x2plot)
vals = np.zeros(X1.shape)
for i in range(X1.shape[1]):
this_X = np.column_stack((X1[:, i], X2[:, i]))
vals[:, i] = model.predict(gaussianKernelGramMatrix(this_X, X,sigma))
plt.contour(X1, X2, vals, colors="blue", levels=[0,0])
plt.show()
Data=scipy.io.loadmat('ex6data3.mat')
X,y=Data['X'],np.matrix(Data['y']).A1
model = sklearn.svm.SVC(C = c, kernel="precomputed", tol=1e-3).fit( gaussianKernelGramMatrix(X,X,sigma) ,y)
plot(X,y)
x1plot = np.linspace(X[:,0].min(), X[:,0].max(), 100).T
x2plot = np.linspace(X[:,1].min(), X[:,1].max(), 100).T
X1, X2 = np.meshgrid(x1plot, x2plot)
vals = np.zeros(X1.shape)
for i in range(X1.shape[1]):
this_X = np.column_stack((X1[:, i], X2[:, i]))
vals[:, i] = model.predict(gaussianKernelGramMatrix(this_X, X,sigma))
plt.contour(X1, X2, vals, colors="blue", levels=[0,0])
plt.show()'''
| 35.631579
| 119
| 0.630355
|
a903863386d75c3c92f3cfed94940c8833979241
| 1,398
|
py
|
Python
|
monk/system_unit_tests/gluon/test_loss_crossentropy.py
|
Shreyashwaghe/monk_v1
|
4ee4d9483e8ffac9b73a41f3c378e5abf5fc799b
|
[
"Apache-2.0"
] | 7
|
2020-07-26T08:37:29.000Z
|
2020-10-30T10:23:11.000Z
|
monk/system_unit_tests/gluon/test_loss_crossentropy.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 9
|
2020-01-28T21:40:39.000Z
|
2022-02-10T01:24:06.000Z
|
monk/system_unit_tests/gluon/test_loss_crossentropy.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 1
|
2020-10-07T12:57:44.000Z
|
2020-10-07T12:57:44.000Z
|
import os
import sys
sys.path.append("../../../monk/");
import psutil
from gluon_prototype import prototype
from compare_prototype import compare
from common import print_start
from common import print_status
import mxnet as mx
import numpy as np
from gluon.losses.return_loss import load_loss
def test_loss_crossentropy(system_dict):
forward = True;
test = "test_loss_crossentropy";
system_dict["total_tests"] += 1;
print_start(test, system_dict["total_tests"])
if(forward):
try:
gtf = prototype(verbose=0);
gtf.Prototype("sample-project-1", "sample-experiment-1");
label = np.random.rand(1, 1);
label = mx.nd.array(label);
y = np.random.rand(1, 5);
y = mx.nd.array(y);
gtf.loss_crossentropy();
load_loss(gtf.system_dict);
loss_obj = gtf.system_dict["local"]["criterion"];
loss_val = loss_obj(y, label);
system_dict["successful_tests"] += 1;
print_status("Pass");
except Exception as e:
system_dict["failed_tests_exceptions"].append(e);
system_dict["failed_tests_lists"].append(test);
forward = False;
print_status("Fail");
else:
system_dict["skipped_tests_lists"].append(test);
print_status("Skipped");
return system_dict
| 27.411765
| 69
| 0.61731
|
288fb5d9d5bddef99103e05cdc6c80eb52146ab2
| 6,998
|
py
|
Python
|
pydocumentdb/execution_context/execution_dispatcher.py
|
afiram/azure-documentdb-python-v2.2.1
|
e6af4adac47b2dcd9774f20659a61d23c0a09971
|
[
"MIT"
] | 1
|
2017-11-03T21:11:48.000Z
|
2017-11-03T21:11:48.000Z
|
pydocumentdb/execution_context/execution_dispatcher.py
|
afiram/azure-documentdb-python-v2.2.1
|
e6af4adac47b2dcd9774f20659a61d23c0a09971
|
[
"MIT"
] | null | null | null |
pydocumentdb/execution_context/execution_dispatcher.py
|
afiram/azure-documentdb-python-v2.2.1
|
e6af4adac47b2dcd9774f20659a61d23c0a09971
|
[
"MIT"
] | 3
|
2017-11-06T16:13:48.000Z
|
2020-03-30T01:09:16.000Z
|
#The MIT License (MIT)
#Copyright (c) 2014 Microsoft Corporation
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
"""Internal class for proxy query execution context implementation in the Azure DocumentDB database service.
"""
import json
from six.moves import xrange
from pydocumentdb.errors import HTTPFailure
from pydocumentdb.execution_context.base_execution_context import _QueryExecutionContextBase
from pydocumentdb.execution_context.base_execution_context import _DefaultQueryExecutionContext
from pydocumentdb.execution_context.query_execution_info import _PartitionedQueryExecutionInfo
from pydocumentdb.execution_context import endpoint_component
from pydocumentdb.execution_context import multi_execution_aggregator
from pydocumentdb.http_constants import StatusCodes, SubStatusCodes
class _ProxyQueryExecutionContext(_QueryExecutionContextBase):
'''
This class represents a proxy execution context wrapper:
- By default uses _DefaultQueryExecutionContext
- if backend responds a 400 error code with a Query Execution Info
it switches to _MultiExecutionContextAggregator
'''
def __init__(self, client, resource_link, query, options, fetch_function):
'''
Constructor
'''
super(self.__class__, self).__init__(client, options)
self._execution_context = _DefaultQueryExecutionContext(client, options, fetch_function)
self._resource_link = resource_link
self._query = query
self._fetch_function = fetch_function
def next(self):
"""Returns the next query result.
:Returns:
dict. the next query result.
:Raises:
StopIteration. If no more result is left.
"""
try:
return next(self._execution_context)
except HTTPFailure as e:
if self._is_partitioned_execution_info(e):
query_execution_info = self._get_partitioned_execution_info(e)
self._execution_context = self._create_pipelined_execution_context(query_execution_info)
else:
raise e
return next(self._execution_context)
def fetch_next_block(self):
"""Returns a block of results.
This method only exists for backward compatibility reasons. (Because QueryIterable
has exposed fetch_next_block api).
:Returns:
list. List of results.
"""
try:
return self._execution_context.fetch_next_block()
except HTTPFailure as e:
if self._is_partitioned_execution_info(e):
query_execution_info = self._get_partitioned_execution_info(e)
self._execution_context = self._create_pipelined_execution_context(query_execution_info)
else:
raise e
return self._execution_context.fetch_next_block()
def _is_partitioned_execution_info(self, e):
return e.status_code == StatusCodes.BAD_REQUEST and e.sub_status == SubStatusCodes.CROSS_PARTITION_QUERY_NOT_SERVABLE
def _get_partitioned_execution_info(self, e):
error_msg = json.loads(e._http_error_message)
return _PartitionedQueryExecutionInfo(json.loads(error_msg['additionalErrorInfo']))
def _create_pipelined_execution_context(self, query_execution_info):
assert self._resource_link, "code bug, resource_link has is required."
execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, self._resource_link, self._query, self._options, query_execution_info)
return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, query_execution_info)
class _PipelineExecutionContext(_QueryExecutionContextBase):
DEFAULT_PAGE_SIZE = 1000
def __init__(self, client, options, execution_context, query_execution_info):
'''
Constructor
'''
super(self.__class__, self).__init__(client, options)
if options.get('maxItemCount'):
self._page_size = options['maxItemCount']
else:
self._page_size = _PipelineExecutionContext.DEFAULT_PAGE_SIZE
self._execution_context = execution_context
self._endpoint = endpoint_component._QueryExecutionEndpointComponent(execution_context)
order_by = query_execution_info.get_order_by()
if (order_by):
self._endpoint = endpoint_component._QueryExecutionOrderByEndpointComponent(self._endpoint)
top = query_execution_info.get_top()
if not (top is None):
self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top)
aggregates = query_execution_info.get_aggregates()
if aggregates:
self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates)
def next(self):
"""Returns the next query result.
:Returns:
dict. the next query result.
:Raises:
StopIteration. If no more result is left.
"""
return next(self._endpoint)
def fetch_next_block(self):
"""Returns a block of results.
This method only exists for backward compatibility reasons. (Because QueryIterable
has exposed fetch_next_block api).
This method internally invokes next() as many times required to collect the
requested fetch size.
:Returns:
list. List of results.
"""
results = []
for cnt in xrange(self._page_size):
try:
results.append(next(self))
except StopIteration:
# no more results
break
return results
| 41.164706
| 187
| 0.691483
|
4f7fd5107cc0602f791e26e1adac74edbb4d0d31
| 973
|
py
|
Python
|
pysocialforce/utils/logging.py
|
Femme-js/PySocialForceJ
|
274914072f08edb1169c5f63c2d8399ad4304833
|
[
"MIT"
] | 42
|
2020-07-07T17:23:15.000Z
|
2022-03-30T00:07:17.000Z
|
pysocialforce/utils/logging.py
|
Femme-js/PySocialForceJ
|
274914072f08edb1169c5f63c2d8399ad4304833
|
[
"MIT"
] | 3
|
2020-10-25T22:01:06.000Z
|
2020-12-01T13:56:34.000Z
|
pysocialforce/utils/logging.py
|
Femme-js/PySocialForceJ
|
274914072f08edb1169c5f63c2d8399ad4304833
|
[
"MIT"
] | 21
|
2020-06-29T13:25:55.000Z
|
2022-03-11T13:41:35.000Z
|
"""General utilities"""
import logging
from functools import wraps
from time import time
# Create a custom logger
logger = logging.getLogger("root")
logger.setLevel(logging.DEBUG)
FORMAT = "%(levelname)s:[%(filename)s:%(lineno)s %(funcName)20s() ] %(message)s"
# Create handlers
c_handler = logging.StreamHandler()
f_handler = logging.FileHandler("file.log")
c_handler.setLevel(logging.DEBUG)
f_handler.setLevel(logging.WARNING)
# Create formatters and add it to handlers
c_format = logging.Formatter(FORMAT)
f_format = logging.Formatter("%(asctime)s|" + FORMAT)
c_handler.setFormatter(c_format)
f_handler.setFormatter(f_format)
# Add handlers to the logger
logger.addHandler(c_handler)
logger.addHandler(f_handler)
def timeit(f):
@wraps(f)
def wrap(*args, **kw):
ts = time()
result = f(*args, **kw)
te = time()
logger.debug(f"Timeit: {f.__name__}({args}, {kw}), took: {te-ts:2.4f} sec")
return result
return wrap
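# --- Usage sketch (illustrative, not part of the original module) ---
# Any function decorated with @timeit gets a DEBUG log line with its call
# arguments and runtime. The function below is a made-up placeholder workload.
@timeit
def _example_sum(n):
    """Sum the first n integers (placeholder workload)."""
    return sum(range(n))


if __name__ == "__main__":
    # emits something like: "Timeit: _example_sum((1000000,), {}), took: 0.0xxx sec"
    _example_sum(1_000_000)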
| 25.605263
| 83
| 0.704008
|
372c1c343be12e685d93604110093f88067364c7
| 6,641
|
py
|
Python
|
pretix_juvare_notify/tasks.py
|
rixx/pretix-juvare-notify
|
97325a7821ea98134e5b0ce41076b4ceb8d0eb24
|
[
"Apache-2.0"
] | null | null | null |
pretix_juvare_notify/tasks.py
|
rixx/pretix-juvare-notify
|
97325a7821ea98134e5b0ce41076b4ceb8d0eb24
|
[
"Apache-2.0"
] | null | null | null |
pretix_juvare_notify/tasks.py
|
rixx/pretix-juvare-notify
|
97325a7821ea98134e5b0ce41076b4ceb8d0eb24
|
[
"Apache-2.0"
] | null | null | null |
import json
import logging
import requests
from django.db.models import Exists, OuterRef
from django_scopes import scope, scopes_disabled
from i18nfield.strings import LazyI18nString
from pretix.base.email import get_email_context
from pretix.base.i18n import language
from pretix.base.models import (
Event,
InvoiceAddress,
Order,
OrderPosition,
Organizer,
SubEvent,
User,
)
from pretix.base.services.mail import TolerantDict
from pretix.celery_app import app
logger = logging.getLogger(__name__)
@app.task()
def juvare_send_task(text: str, to: str, event: int):
if not (text and to and event):
return
with scopes_disabled():
event = Event.objects.get(id=event)
with scope(organizer=event.organizer):
client_secret = event.settings.juvare_client_secret # global setting
url = (
event.settings.juvare_api_url or "https://notify.lab.juvare.com/manage/"
) # global setting
if not (client_secret and url):
return
if url[-1] != "/":
url += "/"
url += "api/v3/notification"
to = to.replace(" ", "")
if event.settings.juvare_text_signature:
text = f"{text}\n\n{event.settings.juvare_text_signature}"
body = [
{
"type": "sms",
"addresses": [to],
"message": text,
"repeatCount": 0,
"repeatDelay": 0,
"consentless": True,
"billingId": event.settings.juvare_billing_id,
}
]
response = requests.post(
url,
data=json.dumps(body),
headers={
"accept": "application/json",
"x-client-secret": client_secret,
"Content-Type": "application/json",
},
)
try:
response.raise_for_status()
message = f"SUCCESS: Sent Juvare Notify message with billing ID: {body[0]['billingId']} for {event.slug}. "
try:
content = response.json()
if content:
message += f"Response: {content}"
else:
message += "No details were provided."
except Exception:
message += "No details were provided."
logger.info(message)
except Exception as e:
message = f"Failed to send Juvare Notify message with billing ID {body[0]['billingId']} for {event.slug}. "
message += f"Error: {e}. "
message += f"Received API response {response.status_code}."
try:
content = response.json()
if content and isinstance(content, dict) and content.get("message"):
message += f"It said: {content['message']}"
else:
message += "It contained no further message to explain the error."
except Exception:
message += "It had no readable JSON body with details."
logger.error(message)
def juvare_send(*args, **kwargs):
juvare_send_task.apply_async(args=args, kwargs=kwargs)
@app.task(acks_late=True)
def send_bulk_sms(event: Event, user: int, message: dict, orders: list) -> None:
with scopes_disabled():
event = Event.objects.all().select_related("organizer").get(pk=event)
with scope(organizer=event.organizer):
orders = Order.objects.filter(pk__in=orders, event=event)
message = LazyI18nString(message)
user = User.objects.get(pk=user) if user else None
logger.debug(
f"Sending bulk SMS to {len(orders)} recipients for event {event.slug}"
)
success = 0
error = 0
skip = 0
for o in orders:
if not o.phone:
skip += 1
else:
try:
ia = o.invoice_address
except InvoiceAddress.DoesNotExist:
ia = InvoiceAddress(order=o)
try:
with language(o.locale, event.settings.region):
email_context = get_email_context(
event=event, order=o, position_or_address=ia
)
text = str(message).format_map(TolerantDict(email_context))
juvare_send(text=text, to=str(o.phone), event=event.pk)
o.log_action(
"pretix.plugins.pretix_juvare_notify.order.sms.sent",
user=user,
data={"message": text, "recipient": str(o.phone)},
)
success += 1
except Exception as e:
logger.error(
f"Failed to send part of a bulk message for order {o.code} ({event.slug}):\n{e}"
)
error += 1
logger.debug(
f"Sending bulk SMS to {len(orders)} recipients for event {event.slug} resulted in {success} successful messages, {error} errors, {skip} skipped."
)
@app.task()
def send_subevent_reminders(subevent: int):
from .models import SubEventReminder
with scope(
organizer=Organizer.objects.filter(events__subevents__pk=subevent).first()
):
subevent = SubEvent.objects.get(pk=subevent)
status, created = SubEventReminder.objects.get_or_create(subevent=subevent)
if not created:
return
logger.debug(
f"Sending bulk reminders for subevent {subevent} ({subevent.event.slug})"
)
opq = OrderPosition.objects.filter(
order=OuterRef("pk"),
canceled=False,
subevent=subevent,
)
orders = (
Order.objects.filter(
phone__isnull=False,
event=subevent.event,
status=Order.STATUS_PAID,
)
.annotate(match_pos=Exists(opq))
.filter(match_pos=True)
.distinct()
)
orders = orders.values_list("pk", flat=True)
logger.debug(f"Found {len(orders)} orders to be sent reminders.")
if orders:
send_bulk_sms.apply_async(
kwargs={
"user": None,
"orders": list(orders),
"message": subevent.event.settings.juvare_reminder_text.data,
"event": subevent.event.pk,
}
)
status.status = "f"
status.save()
| 35.137566
| 157
| 0.538774
|
2cccb438c2156ff0bce189a4f769e9f1685bbc9d
| 1,821
|
py
|
Python
|
model/Data.py
|
Apokefal/python_training
|
3efb1d3c0d6ceb66ee48fa61d6dce0f2c7cdbc3e
|
[
"Apache-2.0"
] | null | null | null |
model/Data.py
|
Apokefal/python_training
|
3efb1d3c0d6ceb66ee48fa61d6dce0f2c7cdbc3e
|
[
"Apache-2.0"
] | null | null | null |
model/Data.py
|
Apokefal/python_training
|
3efb1d3c0d6ceb66ee48fa61d6dce0f2c7cdbc3e
|
[
"Apache-2.0"
] | null | null | null |
###
from sys import maxsize
class Group:
def __init__(self, name=None, header=None, footer=None, id=None):
self.name=name
self.header=header
self.footer=footer
self.id = id
def __repr__(self):
return "%s:%s;%s;%s" % (self.id, self.name, self.header, self.footer)
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id) and self.name == other.name
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
class UsFo:
def __init__(self, firstname=None, lastname=None, address=None, id=None,
homephone=None, mobilephone=None, workphone=None, secondaryphone=None, company=None, all_phones_from_home_page=None,
email=None, email2=None, email3=None, all_emails_from_home_page=None):
self.firstname = firstname
self.lastname = lastname
self.address = address
self.homephone = homephone
self.mobilephone = mobilephone
self.workphone = workphone
self.company = company
self.secondaryphone = secondaryphone
self.email = email
self.email2 = email2
self.email3 = email3
self.all_phones_from_home_page = all_phones_from_home_page
self.all_emails_from_home_page = all_emails_from_home_page
self.id = id
def __repr__(self):
return "%s:%s:%s" % (self.id, self.lastname, self.firstname)
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id)\
and self.lastname == other.lastname and self.firstname == other.firstname
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
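# --- Usage sketch (illustrative, not part of the original module) ---
# id_or_max makes records without an id sort after records that have one, and
# __eq__ treats a missing id as a wildcard, so a freshly created record matches
# the stored record with the same name. The group names below are placeholders.
if __name__ == "__main__":
    stored = [Group(id="3", name="admins"), Group(id="10", name="users")]
    new = Group(name="admins")  # no id assigned yet
    print(new in stored)  # True: the name matches and the missing id is ignored
    print(sorted(stored + [new], key=Group.id_or_max))  # the id-less group sorts last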
| 28.904762
| 133
| 0.621636
|
5f49ef00ee9b23c8787832eee16597beb49b0bbb
| 1,769
|
py
|
Python
|
aliyun-python-sdk-dcdn/aliyunsdkdcdn/request/v20180115/DescribeDcdnCertificateDetailRequest.py
|
liumihust/aliyun-openapi-python-sdk
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-dcdn/aliyunsdkdcdn/request/v20180115/DescribeDcdnCertificateDetailRequest.py
|
liumihust/aliyun-openapi-python-sdk
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-dcdn/aliyunsdkdcdn/request/v20180115/DescribeDcdnCertificateDetailRequest.py
|
liumihust/aliyun-openapi-python-sdk
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdcdn.endpoint import endpoint_data
class DescribeDcdnCertificateDetailRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'dcdn', '2018-01-15', 'DescribeDcdnCertificateDetail')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_CertName(self):
return self.get_query_params().get('CertName')
def set_CertName(self,CertName):
self.add_query_param('CertName',CertName)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_SecurityToken(self):
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self,SecurityToken):
self.add_query_param('SecurityToken',SecurityToken)
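# --- Usage sketch (illustrative, not part of the generated SDK file) ---
# Request objects like this one are normally sent through an AcsClient from
# aliyunsdkcore. The credentials, region id and certificate name below are
# placeholders, not values taken from this repository.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient
    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = DescribeDcdnCertificateDetailRequest()
    request.set_CertName("example-cert-name")
    response = client.do_action_with_exception(request)
    print(response)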
| 36.102041
| 82
| 0.770492
|
8c9a16900a024b6bf0b6864b3a8210c1d16e6416
| 6,346
|
py
|
Python
|
cloudify_gcp/iam/policy_binding.py
|
cloudify-cosmo/cloudify-gcp-plugin
|
c70faee0555070f7fc67f0001395eaafb681b23c
|
[
"Apache-2.0"
] | 4
|
2016-10-24T17:42:07.000Z
|
2020-05-31T00:34:07.000Z
|
cloudify_gcp/iam/policy_binding.py
|
cloudify-cosmo/cloudify-gcp-plugin
|
c70faee0555070f7fc67f0001395eaafb681b23c
|
[
"Apache-2.0"
] | 35
|
2015-04-30T20:14:01.000Z
|
2022-02-03T21:35:54.000Z
|
cloudify_gcp/iam/policy_binding.py
|
cloudify-cosmo/cloudify-gcp-plugin
|
c70faee0555070f7fc67f0001395eaafb681b23c
|
[
"Apache-2.0"
] | 13
|
2015-04-17T16:42:03.000Z
|
2021-06-24T04:12:14.000Z
|
# #######
# Copyright (c) 2018-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
from cloudify import ctx
from Crypto.Random import atfork
from cloudify.decorators import operation
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from oauth2client.service_account import _JWTAccessCredentials
from .. import gcp
from .. import utils
from .. import constants
EMPTY_POLICY_BINDING = {'bindings': []}
class PolicyBinding(gcp.GoogleCloudPlatform):
# https://cloud.google.com/resource-manager/reference/rest/v1/Policy
# Example Gcloud Command:
# gcloud iam service-accounts add-iam-policy-binding
# [Service Account]
# --member='user:[Service Account]'
# --role=projects/[Project]/roles/[Role Name]
def __init__(self, config, logger, resource, policy):
super(gcp.GoogleCloudPlatform, self).__init__(
config,
logger,
scope=constants.CLOUDRESOURCES_SCOPE,
discovery=constants.CLOUDRESOURCES_DISCOVERY)
self.resource = resource or config['project']
self.new_policy = policy
def get_credentials(self, scope):
# check
# run: gcloud beta auth application-default login
# look to ~/.config/gcloud/application_default_credentials.json
atfork()
if hasattr(self.auth, 'get'):
creds_func = _JWTAccessCredentials.from_json_keyfile_dict
else:
creds_func = _JWTAccessCredentials.from_json_keyfile_name
return creds_func(self.auth, scopes=self.scope)
def create_discovery(self, discovery, scope, api_version):
"""
Create Google Cloud API discovery object and perform authentication.
:param discovery: name of the API discovery to be created
:param scope: scope the API discovery will have
:param api_version: version of the API
:return: discovery object
:raise: GCPError if there is a problem with service account JSON file:
e.g. the file is not under the given path or it has wrong permissions
"""
# Crypto.Random.atfork() must be called here because celery doesn't do
# it
atfork()
try:
credentials = self.get_credentials(scope)
return build(discovery, api_version, credentials=credentials)
except IOError as e:
self.logger.error(str(e))
raise gcp.GCPError(str(e))
@gcp.check_response
def get(self):
request_body = {
'options': {
'requestedPolicyVersion': 3
}
}
try:
return self.discovery.projects().getIamPolicy(
resource=self.resource, body=request_body).execute()
except HttpError as e:
self.logger.error(str(e))
return EMPTY_POLICY_BINDING
@gcp.check_response
def create(self):
# https://cloud.google.com/resource-manager/
# reference/rest/v1/projects/setIamPolicy
policy = self.add_new_policies_to_current_policy()
self.logger.debug('Attempting to update policy {}'.format(policy))
request_body = {
'policy': policy
}
try:
return self.discovery.projects().setIamPolicy(
resource=self.resource,
body=request_body).execute()
except HttpError as e:
error = str(e)
self.logger.error(error)
if '404' in error:
return ctx.operation.retry(
message='Attempting to retry create policy binding: '
'{error}.'.format(error=error))
else:
raise
@gcp.check_response
def delete(self):
# https://cloud.google.com/iam/docs/granting-changing-revoking-access
policy = self.remove_new_policies_from_current_policy()
if policy == EMPTY_POLICY_BINDING:
return EMPTY_POLICY_BINDING
self.logger.debug('Attempting to rollback policy {}'.format(policy))
request_body = {
'policy': policy
}
try:
return self.discovery.projects().setIamPolicy(
resource=self.resource,
body=request_body).execute()
except HttpError as e:
error = str(e)
self.logger.error(error)
if '404' in error:
return EMPTY_POLICY_BINDING
else:
raise
def add_new_policies_to_current_policy(self):
current_policy = deepcopy(self.get())
for binding in self.new_policy['bindings']:
if binding not in current_policy['bindings']:
current_policy['bindings'].append(binding)
return current_policy
    def remove_new_policies_from_current_policy(self):
        current_policy = self.get()
        for binding in self.new_policy['bindings']:
            if binding in current_policy['bindings']:
                # remove the matching binding itself rather than deleting by the
                # index from new_policy, which may point at a different element
                current_policy['bindings'].remove(binding)
        return current_policy
@operation(resumable=True)
@utils.throw_cloudify_exceptions
def create(resource, policy, **_):
if utils.resource_created(ctx, constants.RESOURCE_ID):
return
gcp_config = utils.get_gcp_config()
policybinding = PolicyBinding(
gcp_config,
ctx.logger,
resource,
policy
)
utils.create(policybinding)
ctx.instance.runtime_properties.update(policybinding.get())
@operation(resumable=True)
@utils.throw_cloudify_exceptions
def delete(resource, policy, **_):
gcp_config = utils.get_gcp_config()
policybinding = PolicyBinding(
gcp_config,
ctx.logger,
resource,
policy
)
policybinding.delete()
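# --- Example input (illustrative, not part of the original plugin code) ---
# The `policy` argument handled above is expected to be a standard IAM policy
# fragment: a dict with a "bindings" list whose entries pair a role with the
# members it is granted to. The role and member values below are placeholders.
EXAMPLE_POLICY = {
    'bindings': [
        {
            'role': 'roles/viewer',
            'members': ['serviceAccount:my-sa@my-project.iam.gserviceaccount.com'],
        }
    ]
}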
| 34.868132
| 78
| 0.64324
|
b07dacd94d1a0f15c5d8124239967a3cc59e1e1b
| 389
|
py
|
Python
|
hacky_automation.py
|
nalkpas/MS-E-250B-Project
|
5d8dd590c47858c7bba3ce0682024ac607251271
|
[
"MIT"
] | null | null | null |
hacky_automation.py
|
nalkpas/MS-E-250B-Project
|
5d8dd590c47858c7bba3ce0682024ac607251271
|
[
"MIT"
] | null | null | null |
hacky_automation.py
|
nalkpas/MS-E-250B-Project
|
5d8dd590c47858c7bba3ce0682024ac607251271
|
[
"MIT"
] | null | null | null |
import os
map_names = ['CityGrid_JeffersonCounty_CSV', 'JeffCo_firebreaks']
scenarios = {'CityGrid_JeffersonCounty_CSV': ['InitialValues', 'DefensibleSpace', 'IWUIC', 'Buildings', 'Vegetation'],
'JeffCo_firebreaks': ['InitialValues']}
for map_name in map_names:
    for scenario in scenarios[map_name]:
        os.system('python3 propagation_sim.py ' + scenario + ' ' + map_name)
print('\ndone')
| 35.363636
| 118
| 0.737789
|
fa37faf72bd201f3af5011a022c7c02231e43339
| 1,954
|
py
|
Python
|
blackpearl/examples/scrollingtext.py
|
offmessage/blackpearl
|
ffbba460fe7fc7fe4d7e3466f5ff13ea0c081fc5
|
[
"MIT"
] | null | null | null |
blackpearl/examples/scrollingtext.py
|
offmessage/blackpearl
|
ffbba460fe7fc7fe4d7e3466f5ff13ea0c081fc5
|
[
"MIT"
] | null | null | null |
blackpearl/examples/scrollingtext.py
|
offmessage/blackpearl
|
ffbba460fe7fc7fe4d7e3466f5ff13ea0c081fc5
|
[
"MIT"
] | null | null | null |
from blackpearl.modules import Module
from blackpearl.modules import Touch
from blackpearl.projects import Project
from blackpearl.things import Dial
from blackpearl.things import Matrix
from blackpearl.things import Rainbow
from blackpearl.things import Touch
class Scroller(Touch):
listening_for = ['touch',]
hardware_required = [Touch, Matrix, Rainbow,]
def button1_pressed(self):
self.matrix.reset()
self.matrix.addText("This is a great example! ")
self.matrix.scrollspeed = 0.1
self.matrix.loop = True
self.matrix.scroll()
def button2_pressed(self):
self.matrix.scrollspeed = 0.05
def button3_pressed(self):
self.matrix.reset()
self.rainbow.reset()
def button4_pressed(self):
self.matrix.pause()
class SpeedChanger(Module):
listening_for = ['dial',]
hardware_required = [Dial, Matrix,]
def receive(self, message):
value = message['dial']['value']
if value < 200:
spd = 0.25
elif value < 400:
spd = 0.2
elif value < 600:
spd = 0.15
elif value < 800:
spd = 0.1
else:
spd = 0.05
self.matrix.scrollspeed = spd
class Listener(Module):
listening_for = ['matrix',]
hardware_required = [Matrix, Rainbow,]
def receive(self, message):
        if 'scroller' in message['matrix']:
status = message['matrix']['scroller']
if status in ['running', 'step', 'loop']:
self.rainbow.set_all(0, 255, 0)
self.rainbow.update()
elif status in ['paused', 'stopped']:
self.rainbow.set_all(255, 0, 0)
self.rainbow.update()
class MyProject(Project):
modules_required = [Scroller, SpeedChanger, Listener, ]
if __name__ == '__main__':
MyProject()
| 27.521127
| 59
| 0.580348
|
df2c41991e69feb16b4181a6b50d86bd0319923d
| 2,609
|
py
|
Python
|
built-in/PyTorch/Official/cv/image_object_detection/YoloV3_ID1790_for_PyTorch/configs/paa/paa_r50_fpn_1x_coco.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 12
|
2020-12-13T08:34:24.000Z
|
2022-03-20T15:17:17.000Z
|
built-in/PyTorch/Official/cv/image_object_detection/YoloV3_ID1790_for_PyTorch/configs/paa/paa_r50_fpn_1x_coco.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 1
|
2022-01-20T03:11:05.000Z
|
2022-01-20T06:53:39.000Z
|
built-in/PyTorch/Official/cv/image_object_detection/YoloV3_ID1790_for_PyTorch/configs/paa/paa_r50_fpn_1x_coco.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 2
|
2021-07-10T12:40:46.000Z
|
2021-12-17T07:55:15.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_base_ = [
'../_base_/datasets/coco_detection.py',
'../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
model = dict(
type='PAA',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
start_level=1,
add_extra_convs='on_output',
num_outs=5),
bbox_head=dict(
type='PAAHead',
reg_decoded_bbox=True,
score_voting=True,
topk=9,
num_classes=80,
in_channels=256,
stacked_convs=4,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
ratios=[1.0],
octave_base_scale=8,
scales_per_octave=1,
strides=[8, 16, 32, 64, 128]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[0.1, 0.1, 0.2, 0.2]),
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox=dict(type='GIoULoss', loss_weight=1.3),
loss_centerness=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)))
# training and testing settings
train_cfg = dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.1,
neg_iou_thr=0.1,
min_pos_iou=0,
ignore_iof_thr=-1),
allowed_border=-1,
pos_weight=-1,
debug=False)
test_cfg = dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.6),
max_per_img=100)
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
| 30.694118
| 74
| 0.609429
|
8fd3e428b84ef022caa836d52dd65ebb77b4d9d4
| 20,235
|
py
|
Python
|
soepy/test/test_unit.py
|
OpenSourceEconomics/soepy
|
4ff40292799e248e356d614c1a7141bd35a912c9
|
[
"MIT"
] | 12
|
2018-12-17T14:46:55.000Z
|
2022-03-28T21:15:33.000Z
|
soepy/test/test_unit.py
|
OpenSourceEconomics/soepy
|
4ff40292799e248e356d614c1a7141bd35a912c9
|
[
"MIT"
] | 129
|
2019-02-18T08:49:39.000Z
|
2022-03-25T16:52:08.000Z
|
soepy/test/test_unit.py
|
OpenSourceEconomics/soepy
|
4ff40292799e248e356d614c1a7141bd35a912c9
|
[
"MIT"
] | 6
|
2019-02-19T15:23:40.000Z
|
2022-03-28T21:16:00.000Z
|
import collections
import numpy as np
import pandas as pd
import random
from random import randrange, randint
from soepy.pre_processing.model_processing import read_model_spec_init
from soepy.pre_processing.model_processing import read_model_params_init
from soepy.exogenous_processes.education import gen_prob_educ_level_vector
from soepy.exogenous_processes.children import gen_prob_child_init_age_vector
from soepy.exogenous_processes.partner import gen_prob_partner_present_vector
from soepy.exogenous_processes.experience import gen_prob_init_exp_vector
from soepy.exogenous_processes.children import gen_prob_child_vector
from soepy.exogenous_processes.partner import gen_prob_partner_arrival
from soepy.exogenous_processes.partner import gen_prob_partner_separation
from soepy.solve.solve_auxiliary import pyth_create_state_space
from soepy.simulate.simulate_python import simulate
from soepy.test.random_init import random_init
from soepy.test.random_init import read_init_file2
from soepy.test.random_init import namedtuple_to_dict
from soepy.test.random_init import init_dict_flat_to_init_dict
from soepy.solve.solve_python import pyth_solve
from soepy.simulate.simulate_auxiliary import pyth_simulate
def test_unit_nan():
"""This test ensures that the data frame only includes individuals that have
completed education.
"""
constr = {
"AGENTS": 200,
"PERIODS": 7,
"EDUC_YEARS": [0, np.random.randint(1, 3), np.random.randint(4, 6)],
}
random_init(constr)
df = simulate("test.soepy.pkl", "test.soepy.yml")
np.testing.assert_equal(
df[df["Education_Level"] == 1]["Period"].min(),
constr["EDUC_YEARS"][1],
)
np.testing.assert_equal(
df[df["Education_Level"] == 2]["Period"].min(),
constr["EDUC_YEARS"][2],
)
def test_unit_init_print():
"""This test ensures that the init file printing process work as intended. For this
purpose we generate random init file specifications import the resulting files,
write the specifications to another init file, import it again and comparing both
initialization dicts
"""
order = [
"GENERAL",
"CONSTANTS",
"EDUC",
"SIMULATION",
"SOLUTION",
"EXOG_PROC",
]
for _ in range(5):
random_init()
model_params_df, _ = read_model_params_init("test.soepy.pkl")
model_spec = read_model_spec_init("test.soepy.yml", model_params_df)
init_dict_flat = namedtuple_to_dict(model_spec)
init_dict = init_dict_flat_to_init_dict(init_dict_flat)
init_dict2 = read_init_file2("test.soepy.yml")
for key in order:
for subkey in init_dict[key].keys():
if not init_dict[key][subkey] == init_dict2[key][subkey]:
raise AssertionError()
def test_unit_data_frame_shape():
"""This test ensures that the shape of the simulated data frame corresponds
to the random specifications of our initialization file.
"""
for _ in range(5):
constr = dict()
constr["AGENTS"] = np.random.randint(10, 100)
constr["PERIODS"] = np.random.randint(7, 10)
constr["EDUC_YEARS"] = [0, np.random.randint(1, 2), np.random.randint(3, 5)]
random_init(constr)
model_params_df, model_params = read_model_params_init("test.soepy.pkl")
model_spec = read_model_spec_init("test.soepy.yml", model_params_df)
prob_educ_level = gen_prob_educ_level_vector(model_spec)
prob_child_age = gen_prob_child_init_age_vector(model_spec)
prob_partner_present = gen_prob_partner_present_vector(model_spec)
prob_exp_ft = gen_prob_init_exp_vector(
model_spec, model_spec.ft_exp_shares_file_name
)
prob_exp_pt = gen_prob_init_exp_vector(
model_spec, model_spec.pt_exp_shares_file_name
)
prob_child = gen_prob_child_vector(model_spec)
prob_partner_arrival = gen_prob_partner_arrival(model_spec)
prob_partner_separation = gen_prob_partner_separation(model_spec)
# Solve
(
states,
indexer,
covariates,
non_employment_consumption_resources,
emaxs,
child_age_update_rule,
deductions_spec,
income_tax_spec,
) = pyth_solve(
model_params,
model_spec,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
# Simulate
df = pyth_simulate(
model_params,
model_spec,
states,
indexer,
emaxs,
covariates,
non_employment_consumption_resources,
deductions_spec,
income_tax_spec,
child_age_update_rule,
prob_educ_level,
prob_child_age,
prob_partner_present,
prob_exp_ft,
prob_exp_pt,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
# Count individuals with each educ level
counts = []
for i in [0, 1, 2]:
counts.append(df[df["Education_Level"] == i]["Identifier"].nunique())
shape = (
constr["AGENTS"] * constr["PERIODS"]
- counts[1] * constr["EDUC_YEARS"][1]
- counts[2] * constr["EDUC_YEARS"][2]
)
np.testing.assert_array_equal(df.shape[0], shape)
def test_unit_states_hard_code():
"""This test ensures that the state space creation generates the correct admissible
state space points for the first 4 periods."""
model_spec = collections.namedtuple(
"model_spec",
"num_periods num_educ_levels num_types \
last_child_bearing_period, child_age_max \
educ_years child_age_init_max init_exp_max",
)
model_spec = model_spec(2, 3, 2, 24, 10, [0, 0, 0], 4, 2)
states, _ = pyth_create_state_space(model_spec)
states_shape_true = (2748, 8)
states_batch_1_true = [
[0, 0, 0, 0, 0, 0, -1, 0],
[0, 0, 0, 1, 0, 0, -1, 0],
[0, 0, 0, 2, 0, 0, -1, 0],
[0, 0, 0, 0, 1, 0, -1, 0],
[0, 0, 0, 1, 1, 0, -1, 0],
[0, 0, 0, 2, 1, 0, -1, 0],
[0, 0, 0, 0, 2, 0, -1, 0],
[0, 0, 0, 1, 2, 0, -1, 0],
[0, 0, 0, 2, 2, 0, -1, 0],
[0, 1, 0, 0, 0, 0, -1, 0],
[0, 1, 0, 1, 0, 0, -1, 0],
[0, 1, 0, 2, 0, 0, -1, 0],
[0, 1, 0, 0, 1, 0, -1, 0],
[0, 1, 0, 1, 1, 0, -1, 0],
[0, 1, 0, 2, 1, 0, -1, 0],
[0, 1, 0, 0, 2, 0, -1, 0],
[0, 1, 0, 1, 2, 0, -1, 0],
[0, 1, 0, 2, 2, 0, -1, 0],
[0, 2, 0, 0, 0, 0, -1, 0],
[0, 2, 0, 1, 0, 0, -1, 0],
[0, 2, 0, 2, 0, 0, -1, 0],
[0, 2, 0, 0, 1, 0, -1, 0],
[0, 2, 0, 1, 1, 0, -1, 0],
[0, 2, 0, 2, 1, 0, -1, 0],
[0, 2, 0, 0, 2, 0, -1, 0],
[0, 2, 0, 1, 2, 0, -1, 0],
[0, 2, 0, 2, 2, 0, -1, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 2, 0, 0, 0, 0],
]
states_batch_2_true = [
[1, 1, 2, 0, 3, 0, -1, 1],
[1, 1, 2, 1, 3, 0, -1, 1],
[1, 1, 2, 2, 3, 0, -1, 1],
[1, 2, 0, 0, 0, 0, -1, 1],
[1, 2, 0, 1, 0, 0, -1, 1],
[1, 2, 1, 1, 0, 0, -1, 1],
[1, 2, 0, 2, 0, 0, -1, 1],
[1, 2, 1, 2, 0, 0, -1, 1],
[1, 2, 1, 3, 0, 0, -1, 1],
[1, 2, 0, 0, 1, 0, -1, 1],
[1, 2, 2, 0, 1, 0, -1, 1],
[1, 2, 0, 1, 1, 0, -1, 1],
[1, 2, 1, 1, 1, 0, -1, 1],
[1, 2, 2, 1, 1, 0, -1, 1],
[1, 2, 1, 2, 1, 0, -1, 1],
[1, 2, 2, 2, 1, 0, -1, 1],
[1, 2, 1, 3, 1, 0, -1, 1],
[1, 2, 0, 0, 2, 0, -1, 1],
[1, 2, 2, 0, 2, 0, -1, 1],
[1, 2, 1, 1, 2, 0, -1, 1],
[1, 2, 2, 1, 2, 0, -1, 1],
[1, 2, 0, 2, 2, 0, -1, 1],
[1, 2, 1, 2, 2, 0, -1, 1],
[1, 2, 2, 2, 2, 0, -1, 1],
[1, 2, 1, 3, 2, 0, -1, 1],
[1, 2, 2, 0, 3, 0, -1, 1],
[1, 2, 2, 1, 3, 0, -1, 1],
[1, 2, 2, 2, 3, 0, -1, 1],
[1, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 1, 0, 0, 0, 1],
]
states_batch_3_true = [
[1, 2, 1, 1, 0, 1, 2, 1],
[1, 2, 0, 2, 0, 1, 2, 1],
[1, 2, 1, 2, 0, 1, 2, 1],
[1, 2, 1, 3, 0, 1, 2, 1],
[1, 2, 0, 0, 1, 1, 2, 1],
[1, 2, 2, 0, 1, 1, 2, 1],
[1, 2, 0, 1, 1, 1, 2, 1],
[1, 2, 1, 1, 1, 1, 2, 1],
[1, 2, 2, 1, 1, 1, 2, 1],
[1, 2, 1, 2, 1, 1, 2, 1],
[1, 2, 2, 2, 1, 1, 2, 1],
[1, 2, 1, 3, 1, 1, 2, 1],
[1, 2, 0, 0, 2, 1, 2, 1],
[1, 2, 2, 0, 2, 1, 2, 1],
[1, 2, 1, 1, 2, 1, 2, 1],
[1, 2, 2, 1, 2, 1, 2, 1],
[1, 2, 0, 2, 2, 1, 2, 1],
[1, 2, 1, 2, 2, 1, 2, 1],
[1, 2, 2, 2, 2, 1, 2, 1],
[1, 2, 1, 3, 2, 1, 2, 1],
[1, 2, 2, 0, 3, 1, 2, 1],
[1, 2, 2, 1, 3, 1, 2, 1],
[1, 2, 2, 2, 3, 1, 2, 1],
[1, 0, 0, 0, 0, 1, 3, 1],
[1, 0, 0, 1, 0, 1, 3, 1],
[1, 0, 1, 1, 0, 1, 3, 1],
[1, 0, 0, 2, 0, 1, 3, 1],
[1, 0, 1, 2, 0, 1, 3, 1],
[1, 0, 1, 3, 0, 1, 3, 1],
[1, 0, 0, 0, 1, 1, 3, 1],
]
np.testing.assert_array_equal(states_shape_true, states.shape)
np.testing.assert_array_equal(states_batch_1_true, states[0:30])
np.testing.assert_array_equal(states_batch_2_true, states[1220:1250])
np.testing.assert_array_equal(states_batch_3_true, states[2500:2530])
def test_unit_childbearing_age():
"""This test verifies that the state space does not contain newly born children
after the last childbearing period"""
expected = 0
model_spec = collections.namedtuple(
"model_spec",
"num_periods num_educ_levels num_types \
last_child_bearing_period child_age_max \
educ_years child_age_init_max init_exp_max",
)
num_periods = randint(1, 11)
last_child_bearing_period = randrange(num_periods)
model_spec = model_spec(
num_periods, 3, 2, last_child_bearing_period, 10, [0, 1, 2], 4, 4
)
states, _ = pyth_create_state_space(model_spec)
np.testing.assert_equal(
sum(
states[np.where(states[:, 0] == model_spec.last_child_bearing_period + 1)][
:, 6
]
== 0
),
expected,
)
def test_no_children_no_exp():
"""This test ensures that
i) child age equals -1 in the entire simulates sample,
equivalent to no kid is ever born, if the probability to get a child is zero
for all periods
ii) initial experience is zero if so specified in constraint"""
expected = 0
is_expected = False
constr = {"AGENTS": 200, "PERIODS": 10, "CHILD_AGE_INIT_MAX": -1, "INIT_EXP_MAX": 0}
random_init(constr)
model_params_df, model_params = read_model_params_init("test.soepy.pkl")
model_spec = read_model_spec_init("test.soepy.yml", model_params_df)
# Set probability of having children to zero for all periods
prob_child = np.full((model_spec.num_periods, 3), 0.00)
prob_educ_level = gen_prob_educ_level_vector(model_spec)
prob_child_age = gen_prob_child_init_age_vector(model_spec)
prob_partner_present = gen_prob_partner_present_vector(model_spec)
prob_exp_ft = gen_prob_init_exp_vector(
model_spec, model_spec.ft_exp_shares_file_name
)
prob_exp_pt = gen_prob_init_exp_vector(
model_spec, model_spec.pt_exp_shares_file_name
)
prob_partner_arrival = gen_prob_partner_arrival(model_spec)
prob_partner_separation = gen_prob_partner_separation(model_spec)
# Solve
(
states,
indexer,
covariates,
non_employment_consumption_resources,
emaxs,
child_age_update_rule,
deductions_spec,
income_tax_spec,
) = pyth_solve(
model_params,
model_spec,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected,
)
# Simulate
df = pyth_simulate(
model_params,
model_spec,
states,
indexer,
emaxs,
covariates,
non_employment_consumption_resources,
deductions_spec,
income_tax_spec,
child_age_update_rule,
prob_educ_level,
prob_child_age,
prob_partner_present,
prob_exp_ft,
prob_exp_pt,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
np.testing.assert_equal(sum(df.dropna()["Age_Youngest_Child"] != -1), expected)
np.testing.assert_equal(
sum(df[df["Period"] == 0].dropna()["Experience_Part_Time"] != 0), expected
)
np.testing.assert_equal(
sum(df[df["Period"] == 0].dropna()["Experience_Full_Time"] != 0), expected
)
def test_shares_according_to_initial_conditions():
"""This test ensures that the shares of individuals with particular characteristics
in the simulated data frame as determined by initial conditions correspond to the probabilities
specified in the init file.
"""
constr = dict()
constr["AGENTS"] = 500000
constr["EDUC_YEARS"] = [0, 0, 0]
constr["PERIODS"] = 2
constr["CHILD_AGE_INIT_MAX"] = 1
constr["INIT_EXP_MAX"] = 2
random_init(constr)
model_params_df, model_params = read_model_params_init("test.soepy.pkl")
model_spec = read_model_spec_init("test.soepy.yml", model_params_df)
prob_educ_level = gen_prob_educ_level_vector(model_spec)
prob_child_age = gen_prob_child_init_age_vector(model_spec)
prob_partner_present = gen_prob_partner_present_vector(model_spec)
prob_exp_ft = gen_prob_init_exp_vector(
model_spec, model_spec.ft_exp_shares_file_name
)
prob_exp_pt = gen_prob_init_exp_vector(
model_spec, model_spec.pt_exp_shares_file_name
)
prob_child = gen_prob_child_vector(model_spec)
prob_partner_arrival = gen_prob_partner_arrival(model_spec)
prob_partner_separation = gen_prob_partner_separation(model_spec)
# Solve
(
states,
indexer,
covariates,
non_employment_consumption_resources,
emaxs,
child_age_update_rule,
deductions_spec,
income_tax_spec,
) = pyth_solve(
model_params,
model_spec,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
# Simulate
df = pyth_simulate(
model_params,
model_spec,
states,
indexer,
emaxs,
covariates,
non_employment_consumption_resources,
deductions_spec,
income_tax_spec,
child_age_update_rule,
prob_educ_level,
prob_child_age,
prob_partner_present,
prob_exp_ft,
prob_exp_pt,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
# Education level shares
simulated = (
df.groupby(["Education_Level"])["Identifier"].nunique().to_numpy()
/ constr["AGENTS"]
)
np.testing.assert_almost_equal(
simulated, prob_educ_level, decimal=2, err_msg="Education level shares mismatch"
)
# Partner status in initial period
simulated = (
df[df["Period"] == 0]
.groupby(["Education_Level"])["Partner_Indicator"]
.mean()
.to_numpy()
)
np.testing.assert_almost_equal(
simulated, prob_partner_present, decimal=2, err_msg="Partner shares mismatch"
)
# Child ages in initial period
simulated = (
df[df["Period"] == 0]
.groupby(["Education_Level"])["Age_Youngest_Child"]
.value_counts(normalize=True)
.sort_index(ascending=True)
.to_numpy()
)
prob_child_age_flat = [item for sublist in prob_child_age for item in sublist]
np.testing.assert_almost_equal(
simulated, prob_child_age_flat, decimal=2, err_msg="Child age shares mismatch"
)
# Experience in initial period
# Part-time
simulated = (
df[df["Period"] == 0]
.groupby(["Education_Level"])["Experience_Part_Time"]
.value_counts(normalize=True)
.sort_index(ascending=True)
.to_numpy()
)
prob_exp_pt_flat = [item for sublist in prob_exp_pt for item in sublist]
np.testing.assert_almost_equal(
simulated,
prob_exp_pt_flat,
decimal=2,
err_msg="Part-time experience shares mismatch",
)
# Full-time
simulated = (
df[df["Period"] == 0]
.groupby(["Education_Level"])["Experience_Full_Time"]
.value_counts(normalize=True)
.sort_index(ascending=True)
.to_numpy()
)
prob_exp_ft_flat = [item for sublist in prob_exp_ft for item in sublist]
np.testing.assert_almost_equal(
simulated,
prob_exp_ft_flat,
decimal=2,
err_msg="Full-time experience shares mismatch",
)
def test_coef_educ_level_specificity():
"""This test ensures that when parameters for a specific
education group are changed, the simulated data for the remaining education
groups does not change."""
constr = dict()
constr["AGENTS"] = 10000
constr["PERIODS"] = 10
random_init(constr)
model_params_base = pd.read_pickle("test.soepy.pkl")
# Draw random education level to change
random_educ_level = random.choice([0, 1, 2])
param_to_change = "gamma_1s" + str(random_educ_level + 1)
    # copy so that modifying the changed parameters does not also modify the baseline
    model_params_changed = model_params_base.copy()
model_params_changed.loc[("exp_returns", param_to_change), "value"] = (
model_params_changed.loc[("exp_returns", param_to_change), "value"] * 2
)
data = []
for i in (model_params_base, model_params_changed):
model_params_df, model_params = read_model_params_init(i)
model_spec = read_model_spec_init("test.soepy.yml", model_params_df)
prob_educ_level = gen_prob_educ_level_vector(model_spec)
prob_child_age = gen_prob_child_init_age_vector(model_spec)
prob_partner_present = gen_prob_partner_present_vector(model_spec)
prob_exp_ft = gen_prob_init_exp_vector(
model_spec, model_spec.ft_exp_shares_file_name
)
prob_exp_pt = gen_prob_init_exp_vector(
model_spec, model_spec.pt_exp_shares_file_name
)
prob_child = gen_prob_child_vector(model_spec)
prob_partner_arrival = gen_prob_partner_arrival(model_spec)
prob_partner_separation = gen_prob_partner_separation(model_spec)
# Solve
(
states,
indexer,
covariates,
non_employment_consumption_resources,
emaxs,
child_age_update_rule,
deductions_spec,
income_tax_spec,
) = pyth_solve(
model_params,
model_spec,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
# Simulate
df = pyth_simulate(
model_params,
model_spec,
states,
indexer,
emaxs,
covariates,
non_employment_consumption_resources,
deductions_spec,
income_tax_spec,
child_age_update_rule,
prob_educ_level,
prob_child_age,
prob_partner_present,
prob_exp_ft,
prob_exp_pt,
prob_child,
prob_partner_arrival,
prob_partner_separation,
is_expected=False,
)
data.append(df)
data_base = data[0]
data_changed = data[1]
for level in (0, 1, 2):
if level == random_educ_level:
continue
data_base_educ_level = data_base[data_base["Education_Level"] == level]
data_changed_educ_level = data_changed[data_changed["Education_Level"] == level]
pd.testing.assert_frame_equal(data_base_educ_level, data_changed_educ_level)
| 31.966825
| 99
| 0.593131
|
90b2b4fcc3b62528ac08d88f0794659301e1770c
| 3,328
|
py
|
Python
|
torch_sparse/index_select.py
|
mariogeiger/pytorch_sparse
|
57852a6664ce58324dbeab0db13837ffb9005930
|
[
"MIT"
] | 623
|
2018-07-29T10:45:05.000Z
|
2022-03-29T04:35:08.000Z
|
torch_sparse/index_select.py
|
mariogeiger/pytorch_sparse
|
57852a6664ce58324dbeab0db13837ffb9005930
|
[
"MIT"
] | 201
|
2018-08-15T14:11:03.000Z
|
2022-03-31T14:14:01.000Z
|
torch_sparse/index_select.py
|
mariogeiger/pytorch_sparse
|
57852a6664ce58324dbeab0db13837ffb9005930
|
[
"MIT"
] | 101
|
2018-10-14T04:08:11.000Z
|
2022-03-23T21:33:37.000Z
|
from typing import Optional
import torch
from torch_scatter import gather_csr
from torch_sparse.storage import SparseStorage, get_layout
from torch_sparse.tensor import SparseTensor
def index_select(src: SparseTensor, dim: int,
idx: torch.Tensor) -> SparseTensor:
dim = src.dim() + dim if dim < 0 else dim
assert idx.dim() == 1
if dim == 0:
old_rowptr, col, value = src.csr()
rowcount = src.storage.rowcount()
rowcount = rowcount[idx]
rowptr = col.new_zeros(idx.size(0) + 1)
torch.cumsum(rowcount, dim=0, out=rowptr[1:])
row = torch.arange(idx.size(0),
device=col.device).repeat_interleave(rowcount)
perm = torch.arange(row.size(0), device=row.device)
perm += gather_csr(old_rowptr[idx] - rowptr[:-1], rowptr)
col = col[perm]
if value is not None:
value = value[perm]
sparse_sizes = (idx.size(0), src.sparse_size(1))
storage = SparseStorage(row=row, rowptr=rowptr, col=col, value=value,
sparse_sizes=sparse_sizes, rowcount=rowcount,
colptr=None, colcount=None, csr2csc=None,
csc2csr=None, is_sorted=True)
return src.from_storage(storage)
elif dim == 1:
old_colptr, row, value = src.csc()
colcount = src.storage.colcount()
colcount = colcount[idx]
colptr = row.new_zeros(idx.size(0) + 1)
torch.cumsum(colcount, dim=0, out=colptr[1:])
col = torch.arange(idx.size(0),
device=row.device).repeat_interleave(colcount)
perm = torch.arange(col.size(0), device=col.device)
perm += gather_csr(old_colptr[idx] - colptr[:-1], colptr)
row = row[perm]
csc2csr = (idx.size(0) * row + col).argsort()
row, col = row[csc2csr], col[csc2csr]
if value is not None:
value = value[perm][csc2csr]
sparse_sizes = (src.sparse_size(0), idx.size(0))
storage = SparseStorage(row=row, rowptr=None, col=col, value=value,
sparse_sizes=sparse_sizes, rowcount=None,
colptr=colptr, colcount=colcount, csr2csc=None,
csc2csr=csc2csr, is_sorted=True)
return src.from_storage(storage)
else:
value = src.storage.value()
if value is not None:
return src.set_value(value.index_select(dim - 1, idx),
layout='coo')
else:
raise ValueError
def index_select_nnz(src: SparseTensor, idx: torch.Tensor,
layout: Optional[str] = None) -> SparseTensor:
assert idx.dim() == 1
if get_layout(layout) == 'csc':
idx = src.storage.csc2csr()[idx]
row, col, value = src.coo()
row, col = row[idx], col[idx]
if value is not None:
value = value[idx]
return SparseTensor(row=row, rowptr=None, col=col, value=value,
sparse_sizes=src.sparse_sizes(), is_sorted=True)
SparseTensor.index_select = lambda self, dim, idx: index_select(self, dim, idx)
tmp = lambda self, idx, layout=None: index_select_nnz( # noqa
self, idx, layout)
SparseTensor.index_select_nnz = tmp
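# A minimal usage sketch (not part of the original module), assuming torch and
# torch_sparse are installed; it only exercises the two methods patched above.
#
#   import torch
#   from torch_sparse import SparseTensor
#
#   row = torch.tensor([0, 0, 1, 2])
#   col = torch.tensor([0, 2, 1, 2])
#   value = torch.tensor([1., 2., 3., 4.])
#   mat = SparseTensor(row=row, col=col, value=value, sparse_sizes=(3, 3))
#
#   sub_rows = mat.index_select(0, torch.tensor([0, 2]))       # keep rows 0 and 2
#   sub_entries = mat.index_select_nnz(torch.tensor([0, 3]))   # keep non-zeros 0 and 3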
| 32.627451
| 79
| 0.582332
|
f825c1f14b6f2ccd5c4f5751f712af5ace6d4d0d
| 145
|
py
|
Python
|
docs/ui/examples/example7dd39b928b483d0ec531f34910149fc4.py
|
okajun35/Flexx_translate_ja
|
d6aaf2d981623e69cd70d20761b6509ed5af304f
|
[
"MIT"
] | 1
|
2022-03-09T03:35:56.000Z
|
2022-03-09T03:35:56.000Z
|
docs/ui/examples/example7dd39b928b483d0ec531f34910149fc4.py
|
okajun35/Flexx_translate_ja
|
d6aaf2d981623e69cd70d20761b6509ed5af304f
|
[
"MIT"
] | null | null | null |
docs/ui/examples/example7dd39b928b483d0ec531f34910149fc4.py
|
okajun35/Flexx_translate_ja
|
d6aaf2d981623e69cd70d20761b6509ed5af304f
|
[
"MIT"
] | null | null | null |
from flexx import app, ui
# A red widget
class Example(ui.Widget):
CSS = ".flx-Example {background:#f00; min-width: 20px; min-height:20px}"
| 24.166667
| 76
| 0.696552
|
8db21d85e455f281652c3a01a12e96a4a1892505
| 26,337
|
py
|
Python
|
archive_scripts/x_nba_shot_charts.py
|
Connor-R/nba_shot_charts
|
7331ea4acbf8f38073681efb97576b36cca519d7
|
[
"MIT"
] | 12
|
2017-04-07T17:15:57.000Z
|
2021-02-03T22:57:00.000Z
|
archive_scripts/x_nba_shot_charts.py
|
Connor-R/nba_shot_charts
|
7331ea4acbf8f38073681efb97576b36cca519d7
|
[
"MIT"
] | null | null | null |
archive_scripts/x_nba_shot_charts.py
|
Connor-R/nba_shot_charts
|
7331ea4acbf8f38073681efb97576b36cca519d7
|
[
"MIT"
] | null | null | null |
import requests
import urllib
import os
import shutil
import csv
import sys
import glob
import math
import pandas as pd
import numpy as np
import argparse
import matplotlib as mpb
import matplotlib.pyplot as plt
from matplotlib import offsetbox as osb
from matplotlib.patches import RegularPolygon
from datetime import date, datetime, timedelta
# setting the color map we want to use
mymap = mpb.cm.YlOrRd
# taking in a dictionary of player information and initializing the processes
def initiate(p_list, list_length, printer=True):
# setting our base directory, I have this set to your current working directory (cwd)
base_path = os.getcwd()
# iterating through our player dictionary to grab the player_title and player_id
counter = 1
for player_title, player_data in p_list.items():
player_id, start_year, end_year = player_data
start_year, end_year = int(start_year), int(end_year)
if printer is True:
print "\n\nProcessing Player " + str(counter) + " of " + list_length + ': ' + player_title + ' (' + str(start_year) + ' - ' + str(end_year) + ')\n'
counter += 1
if start_year < 1996:
start_year = 1996
if end_year > 2017:
end_year = 2017
player_name = player_title.replace(" ","_")
# defines a path to a directory for saving the charts of the current player
path = base_path+'/shot_charts/'+player_name+'/'
# checks whether our desired directory exists, archives the existing charts if it does, and (re-)creates the directory
if not os.path.exists(path):
os.makedirs(path)
# if you download this code and re-use it, you'll either have to alter the path in the next line, or delete the following 3 lines
else:
arch_path = base_path+'/shot_charts_archived_charts/'+str(date.today())+'_'+str(datetime.now().hour)+'_'+player_name
if os.path.exists(arch_path):
shutil.rmtree(arch_path)
os.rename(path, arch_path)
os.makedirs(path)
# deletes previous versions of images
os.chdir(path)
files=glob.glob('*.png')
for filename in files:
os.unlink(filename)
os.chdir(base_path)
# We set a min and max year to later overwrite (for usage in noting a player's career length)
min_year = 9999
max_year = 0
# we set an empty DataFrame and will append each year's shots, creating a career shot log
all_shots_df = pd.DataFrame()
# we iterate through each year of a player's career, creating a shot chart for every year while also adding each season's data to our all_shots_df DataFrame
for year in range(start_year,end_year):
season_start = year
# takes a season (e.g. 2008) and returns the nba ID (e.g. 2008-09)
season_id = str(season_start)+'-'+str(season_start%100+1).zfill(2)[-2:]
if printer is True:
# we print the season/player combo in order to monitor progress
print '\t',
print season_id, player_name
# a DataFrame of the shots a player took in a given season
year_shots_df = acquire_shootingData(player_id, season_id)
# if the DataFrame isn't empty (i.e., if the player played this season), we make a shot chart for this season as well as append the career DataFrame for the player and overwrite the current min_year and max_year variables
if year_shots_df is not None and len(year_shots_df.index) != 0:
if year < min_year:
min_year = year
if (year+1) > max_year:
max_year = (year+1)
# plotting the data for the given season/player combination we are iterating through
shooting_plot(path, year_shots_df, player_id, season_id, player_title, player_name)
# appending the career shots DataFrame
all_shots_df = all_shots_df.append(year_shots_df, ignore_index=True)
# making a text string for usage in the career shot chart
# again, if you download and are re-using this code, you'll either have to delete or change the arch_path that I use for archiving old charts
if min_year == 9999 or max_year == 0:
arch_path = base_path + '/shot_charts_archived_charts/'+player_name+'_NOGAMES'
os.makedirs(arch_path)
else:
career_string = "CAREER (%s-%s)" % (min_year, max_year)
if printer is True:
print '\t\t\t', career_string, player_name
# making a shot chart for all shots in the player's career. note that we have to use the option isCareer, min_year, and max_year arguments to properly format this chart
shooting_plot(path, all_shots_df, player_id, career_string, player_title, player_name, isCareer=True, min_year=min_year, max_year=max_year)
# after we finish the script, we remove all the player images that were saved to the directory during the acquire_playerPic function
os.chdir(base_path)
files=glob.glob('*.png')
for filename in files:
os.unlink(filename)
#Getting the shot data and returning a DataFrame with every shot for a specific player/season combo
def acquire_shootingData(player_id,season):
import requests
shot_chart_temp = 'http://stats.nba.com/stats/shotchartdetail?CFID=33&CFPARAMS=%s&ContextFilter=&ContextMeasure=FGA&DateFrom=&DateTo=&GameID=&GameSegment=&LastNGames=0&LeagueID=00&Location=&MeasureType=Base&Month=0&OpponentTeamID=0&Outcome=&PaceAdjust=N&PerMode=PerGame&Period=0&PlayerID=%s&PlusMinus=N&PlayerPosition=&Rank=N&RookieYear=&Season=%s&SeasonSegment=&SeasonType=Regular+Season&TeamID=0&VsConference=&VsDivision=&mode=Advanced&showDetails=0&showShots=1&showZones=0'
shot_chart_url = shot_chart_temp % (season, player_id, season)
# print shot_chart_url
# user agent makes it seem as though we're an actual user getting the data
user_agent = 'User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36'
response = requests.get(shot_chart_url, headers={'User-Agent': user_agent})
# headers is the column titles for our DataFrame
headers = response.json()['resultSets'][0]['headers']
# shots will be each row in the DataFrame
shots = response.json()['resultSets'][0]['rowSet']
# if there were no shots at that URL, we return nothing
if shots == []:
return
# creating our DataFrame from our shots and headers variables
shot_df = pd.DataFrame(shots, columns=headers)
return shot_df
# we set gridNum to be 30 (basically a grid of 30x30 hexagons)
def shooting_plot(path, shot_df, player_id, season_id, player_title, player_name, isCareer=False, min_year = 0, max_year = 0, plot_size=(12,12), gridNum=30):
# get the shooting percentage and number of shots for all bins, all shots, and a subset of some shots
(ShootingPctLocs, shotNumber), shot_pts_all, shot_pts_3, shot_pts_2, shot_pts_mid, shot_pts_NONres, shot_pts_res, shot_count_all, shot_count_3, shot_count_2, shot_count_mid, shot_count_NONres, shot_count_res, team_list = find_shootingPcts(shot_df, gridNum)
# returns the effective FG% values for usage later in the chart's text
def get_efg(shot_pts, shot_count):
try:
eff_fg_float = float((float(shot_pts)/2.0)/float(shot_count))*100.0
except ZeroDivisionError:
eff_fg_float = 0.0
eff_fg = ("%.2f" % eff_fg_float)
pct_float = float(shot_count)/float(shot_count_all)*100
pct = ("%.2f" % pct_float)
return eff_fg_float, eff_fg, pct_float, pct
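# worked example for get_efg (illustrative numbers, not from the data): 30 points
# on 25 attempts gives eFG% = (30 / 2) / 25 * 100 = 60.0, so made threes are
# effectively credited 1.5x relative to made twos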
eff_fg_all_float, eff_fg_all, pct_all_float, pct_all = get_efg(shot_pts_all, shot_count_all)
eff_fg_3_float, eff_fg_3, pct_3_float, pct_3 = get_efg(shot_pts_3, shot_count_3)
eff_fg_2_float, eff_fg_2, pct_2_float, pct_2 = get_efg(shot_pts_2, shot_count_2)
eff_fg_mid_float, eff_fg_mid, pct_mid_float, pct_mid = get_efg(shot_pts_mid, shot_count_mid)
eff_fg_NONres_float, eff_fg_NONres, pct_NONres_float, pct_NONres = get_efg(shot_pts_NONres, shot_count_NONres)
eff_fg_res_float, eff_fg_res, pct_res_float, pct_res = get_efg(shot_pts_res, shot_count_res)
# Creates a text string for all teams that a player has played on in a given season or career
team_text = ""
if len(team_list) == 1:
team_text = str(team_list[0])
else:
i = 0
for team in team_list[0:-1]:
if i%2 == 0 and i > 0:
team_text += '\n'
text_add = '%s, ' % str(team)
team_text += text_add
i += 1
if i%2 == 0:
team_text += '\n'
team_text += str(team_list[-1])
# set the figure for drawing on
fig = plt.figure(figsize=(12,12))
# cmap will be used as our color map going forward
cmap = mymap
# where to place the plot within the figure, first two attributes are the x_min and y_min, and the next 2 are the % of the figure that is covered in the x_direction and y_direction (so in this case, our plot will go from (0.05, 0.15) at the bottom left, and stretches to (0.85,0.925) at the top right)
ax = plt.axes([0.05, 0.15, 0.81, 0.775])
# setting the background color using a hex code (http://www.rapidtables.com/web/color/RGB_Color.htm)
ax.set_axis_bgcolor('#08374B')
# draw the outline of the court
draw_court(outer_lines=False)
# specify the dimensions of the court we draw
plt.xlim(-250,250)
plt.ylim(370, -30)
# draw player image
zoom = 1 # we don't need to zoom the image at all
img = acquire_playerPic(player_id, zoom)
ax.add_artist(img)
# specify the % of all attempts at which a zone gets a maximum-sized hexagon [here any zone with >= 1% of all shots is drawn at the maximum radius, but this is free to be changed based on personal preference]
max_radius_perc = 1.0
max_rad_multiplier = 100.0/max_radius_perc
# changing to what power we want to scale the area of the hexagons as we increase/decrease the radius. This value can also be changed for personal preferences.
area_multiplier = (3./4.)
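# illustrative scaling of the two settings above: with max_radius_perc = 1.0, a bin
# holding 1% of all attempts gets the full 295/gridNum radius, while a bin holding
# 0.25% of attempts gets (100 * 0.0025) ** 0.75 = 0.25 ** 0.75, roughly 0.35 of it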
# draw hexagons
# i is the bin#, and shots is the shooting% for that bin
for i, shots in enumerate(ShootingPctLocs):
x,y = shotNumber.get_offsets()[i]
# we check the distance from the hoop the bin is. If it in 3pt territory, we add a multiplier of 1.5 to the shooting% to properly encapsulate eFG%
dist = math.sqrt(x**2 + y**2)
mult = 1.0
if abs(x) >= 220:
mult = 1.5
elif dist/10 >= 23.75:
mult = 1.5
else:
mult = 1.0
# Setting the eFG% for a bin, making sure it's never over 1 (our maximum color value)
bin_pct = min(shots*mult, 1.0)
hexes = RegularPolygon(
shotNumber.get_offsets()[i], #x/y coords
numVertices=6,
radius=(295/gridNum)*((max_rad_multiplier*((shotNumber.get_array()[i]))/shot_count_all)**(area_multiplier)),
color=cmap(bin_pct),
alpha=0.95,
fill=True)
# setting a maximum radius for our bins at 295 (personal preference)
if hexes.radius > 295/gridNum:
hexes.radius = 295/gridNum
ax.add_patch(hexes)
# creating the frequency legend
# we want to have 4 ticks in this legend so we iterate through 4 items
for i in range(0,4):
base_rad = max_radius_perc/4
# the x,y coords for our patch (the first coordinate is (-205,415), and then we move up and left for each addition coordinate)
patch_x = -205-(10*i)
patch_y = 365-(14*i)
# specifying the size of our hexagon in the frequency legend
patch_rad = (299.9/gridNum)*((base_rad+(base_rad*i))**(area_multiplier))
patch_perc = base_rad+(i*base_rad)
# the x,y coords for our text
text_x = patch_x + patch_rad + 2
text_y = patch_y
patch_axes = (patch_x, patch_y)
# the text is slightly different for the maximum-sized hexagon in the legend
if i < 3:
text_text = ' %s%% of Attempted Shots' % ('%.2f' % patch_perc)
else:
text_text = '$\geq$%s%% of Attempted Shots' %(str(patch_perc))
# draw the hexagon. the color=map(eff_fg_all_float/100) makes the hexagons in the legend the same color as the player's overall eFG%
patch = RegularPolygon(patch_axes, numVertices=6, radius=patch_rad, color=cmap(eff_fg_all_float/100), alpha=0.95, fill=True)
ax.add_patch(patch)
# add the text for the hexagon
ax.text(text_x, text_y, text_text, fontsize=12, horizontalalignment='left', verticalalignment='center', family='DejaVu Sans', color='white', fontweight='bold')
# Add a title to our frequency legend (the x/y coords are hardcoded).
# Again, the color=map(eff_fg_all_float/100) makes the hexagons in the legend the same color as the player's overall eFG%
ax.text(-235, 310, 'Zone Frequencies', fontsize = 15, horizontalalignment='left', verticalalignment='bottom', family='DejaVu Sans', color=cmap(eff_fg_all_float/100), fontweight='bold')
# Add a title to our chart (just the player's name)
chart_title = "%s" % (player_title.upper())
ax.text(31.25,-40, chart_title, fontsize=29, horizontalalignment='center', verticalalignment='bottom', family='DejaVu Sans', color=cmap(eff_fg_all_float/100), fontweight='bold')
# Add user text
ax.text(-250,-31,'CHARTS BY @NBAChartBot',
fontsize=10, horizontalalignment='left', verticalalignment = 'bottom', family='DejaVu Sans', color='white', fontweight='bold')
# Add data source text
ax.text(31.25,-31,'DATA FROM STATS.NBA.COM',
fontsize=10, horizontalalignment='center', verticalalignment = 'bottom', family='DejaVu Sans', color='white', fontweight='bold')
# Add date text
_date = date.today()
ax.text(250,-31,'AS OF %s' % (str(_date)),
fontsize=10, horizontalalignment='right', verticalalignment = 'bottom', family='DejaVu Sans', color='white', fontweight='bold')
# adding breakdown of eFG% by shot zone at the bottom of the chart
ax.text(300,380, '%s Points - %s Shots [TOTAL (%s%% of total)] (%s eFG%%)'
'\n%s Points - %s Shots [All 3PT (%s%%)] (%s eFG%%)'
'\n%s Points - %s Shots [All 2PT (%s%%)] (%s eFG%%)'
'\n%s Points - %s Shots [Mid-Range (%s%%)] (%s eFG%%)'
'\n%s Points - %s Shots [Paint (Non-Restricted) (%s%%)] (%s eFG%%)'
'\n%s Points - %s Shots [Paint (Restricted) (%s%%)] (%s eFG%%)' % (shot_pts_all, shot_count_all, pct_all, eff_fg_all, shot_pts_3, shot_count_3, pct_3, eff_fg_3, shot_pts_2, shot_count_2, pct_2, eff_fg_2, shot_pts_mid, shot_count_mid, pct_mid, eff_fg_mid, shot_pts_NONres, shot_count_NONres, pct_NONres, eff_fg_NONres, shot_pts_res, shot_count_res, pct_res, eff_fg_res),
fontsize=12, horizontalalignment='right', verticalalignment = 'top', family='DejaVu Sans', color='white', linespacing=1.5)
# adding which season the chart is for, as well as what teams the player is on
if len(team_list) > 10:
ax.text(-250,380,'%s Regular Season'
'\n%s' % (season_id, team_text),
fontsize=10, horizontalalignment='left', verticalalignment = 'top', family='DejaVu Sans', color='white', linespacing=1.3)
else:
ax.text(-250,380,'%s Regular Season'
'\n%s' % (season_id, team_text),
fontsize=12, horizontalalignment='left', verticalalignment = 'top', family='DejaVu Sans', color='white', linespacing=1.5)
# adding a color bar for reference
ax2 = fig.add_axes([0.875, 0.15, 0.04, 0.775])
cb = mpb.colorbar.ColorbarBase(ax2,cmap=cmap, orientation='vertical')
cbytick_obj = plt.getp(cb.ax.axes, 'yticklabels')
plt.setp(cbytick_obj, color='white', fontweight='bold')
cb.set_label('Effective Field Goal %', family='DejaVu Sans', color='white', fontweight='bold', labelpad=-9, fontsize=14)
cb.set_ticks([0.0, 0.25, 0.5, 0.75, 1.0])
cb.set_ticklabels(['0%','25%', '50%','75%', '$\mathbf{\geq}$100%'])
# if the isCareer argument is set to True, we slightly alter the file name of the saved plot
title_efg = eff_fg_all_float
if isCareer is False:
figtit = path+'shot_charts_%s_%s_%s.png' % (player_name, season_id, str(int(round(eff_fg_all_float))))
else:
figtit = path+'shot_charts_%s_CAREER_%s-%s_%s.png' % (player_name, min_year, max_year, str(int(round(eff_fg_all_float))))
plt.savefig(figtit, facecolor='#305E72', edgecolor='black')
plt.clf()
#Getting the shooting percentages for each grid.
#The general idea of this function, as well as a substantial block of the actual code was recycled from Dan Vatterott [http://www.danvatterott.com/]
def find_shootingPcts(shot_df, gridNum):
x = shot_df.LOC_X[shot_df['LOC_Y']<425.1]
y = shot_df.LOC_Y[shot_df['LOC_Y']<425.1]
# Grabbing the x and y coords, for all made shots
x_made = shot_df.LOC_X[(shot_df['SHOT_MADE_FLAG']==1) & (shot_df['LOC_Y']<425.1)]
y_made = shot_df.LOC_Y[(shot_df['SHOT_MADE_FLAG']==1) & (shot_df['LOC_Y']<425.1)]
# Recording the point value of each made shot
shot_pts = 2*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_TYPE']=='2PT Field Goal')) + 3*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_TYPE']=='3PT Field Goal'))
# Recording the team name of a player for each made shot
shot_teams = (shot_df['TEAM_NAME'])
# Dropping all the duplicate entries for the team names
teams = shot_teams.drop_duplicates()
# Grabbing all points made from different shot locations, as well as counting all times a shot was missed from that zone
pts_3 = 3*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_TYPE']=='3PT Field Goal'))
miss_3 = 1*((shot_df['SHOT_MADE_FLAG']==0) & (shot_df['SHOT_TYPE']=='3PT Field Goal'))
pts_2 = 2*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_TYPE']=='2PT Field Goal'))
miss_2 = 1*((shot_df['SHOT_MADE_FLAG']==0) & (shot_df['SHOT_TYPE']=='2PT Field Goal'))
pts_mid = 2*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_ZONE_BASIC']=='Mid-Range'))
miss_mid = 1*((shot_df['SHOT_MADE_FLAG']==0) & (shot_df['SHOT_ZONE_BASIC']=='Mid-Range'))
pts_NONrestricted = 2*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_ZONE_BASIC']=='In The Paint (Non-RA)'))
miss_NONrestricted = 1*((shot_df['SHOT_MADE_FLAG']==0) & (shot_df['SHOT_ZONE_BASIC']=='In The Paint (Non-RA)'))
pts_restricted = 2*((shot_df['SHOT_MADE_FLAG']==1) & (shot_df['SHOT_ZONE_BASIC']=='Restricted Area'))
miss_restricted = 1*((shot_df['SHOT_MADE_FLAG']==0) & (shot_df['SHOT_ZONE_BASIC']=='Restricted Area'))
#compute number of shots made and taken from each hexbin location
hb_shot = plt.hexbin(x, y, gridsize=gridNum, extent=(-250,250,425,-50));
plt.close()
hb_made = plt.hexbin(x_made, y_made, gridsize=gridNum, extent=(-250,250,425,-50));
plt.close()
#compute shooting percentage
ShootingPctLocs = hb_made.get_array() / hb_shot.get_array()
ShootingPctLocs[np.isnan(ShootingPctLocs)] = 0 #makes 0/0s=0
# creating a list of all teams a player played for in a given season/career
team_list = []
for team in teams:
team_list.append(team)
# Summing all made points from the shot zones
shot_pts_all = sum(shot_pts)
shot_pts_3 = sum(pts_3)
shot_pts_2 = sum(pts_2)
shot_pts_mid = sum(pts_mid)
shot_pts_NONres = sum(pts_NONrestricted)
shot_pts_res = sum(pts_restricted)
# Counting the total number of shots from the shot zones
shot_count_all = len(shot_df.index)
shot_count_3 = sum(miss_3) + sum(pts_3)/3
shot_count_2 = sum(miss_2) + sum(pts_2)/2
shot_count_mid = sum(miss_mid) + sum(pts_mid)/2
shot_count_NONres = sum(miss_NONrestricted) + sum(pts_NONrestricted)/2
shot_count_res = sum(miss_restricted) + sum(pts_restricted)/2
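# example of the bookkeeping above (illustrative numbers): if pts_3 sums to 30, the
# player made 30 / 3 = 10 threes, so shot_count_3 is those 10 makes plus every
# missed three counted in miss_3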
# Returning all values
return (ShootingPctLocs, hb_shot), shot_pts_all, shot_pts_3, shot_pts_2, shot_pts_mid, shot_pts_NONres, shot_pts_res, shot_count_all, shot_count_3, shot_count_2, shot_count_mid, shot_count_NONres, shot_count_res, team_list
#Drawing the outline of the court
#Most of this code was recycled from Savvas Tjortjoglou [http://savvastjortjoglou.com]
def draw_court(ax=None, color='white', lw=2, outer_lines=False):
from matplotlib.patches import Circle, Rectangle, Arc
if ax is None:
ax = plt.gca()
hoop = Circle((0, 0), radius=7.5, linewidth=lw, color=color, fill=False)
backboard = Rectangle((-30, -7.5), 60, -1, linewidth=lw, color=color)
outer_box = Rectangle((-80, -47.5), 160, 190, linewidth=lw, color=color,
fill=False)
inner_box = Rectangle((-60, -47.5), 120, 190, linewidth=lw, color=color,
fill=False)
top_free_throw = Arc((0, 142.5), 120, 120, theta1=0, theta2=180,
linewidth=lw, color=color, fill=False)
bottom_free_throw = Arc((0, 142.5), 120, 120, theta1=180, theta2=0,
linewidth=lw, color=color, linestyle='dashed')
restricted = Arc((0, 0), 80, 80, theta1=0, theta2=180, linewidth=lw,
color=color)
corner_three_a = Rectangle((-220, -47.5), 0, 140, linewidth=lw,
color=color)
corner_three_b = Rectangle((220, -47.5), 0, 140, linewidth=lw, color=color)
three_arc = Arc((0, 0), 475, 475, theta1=22, theta2=158, linewidth=lw,
color=color)
center_outer_arc = Arc((0, 422.5), 120, 120, theta1=180, theta2=0,
linewidth=lw, color=color)
center_inner_arc = Arc((0, 422.5), 40, 40, theta1=180, theta2=0,
linewidth=lw, color=color)
court_elements = [hoop, backboard, outer_box, inner_box, top_free_throw,
bottom_free_throw, restricted, corner_three_a,
corner_three_b, three_arc, center_outer_arc,
center_inner_arc]
if outer_lines:
outer_lines = Rectangle((-250, -47.5), 500, 470, linewidth=lw,
color=color, fill=False)
court_elements.append(outer_lines)
for element in court_elements:
ax.add_patch(element)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_xticks([])
ax.set_yticks([])
return ax
#for usage with shot_chart_bot
def gen_charts(player_name):
p_list = get_plist()
vals = p_list.get(player_name)
if vals is None:
sys.exit('Need a valid player (check spelling)')
player_list = {player_name:vals}
initiate(player_list, str(len(player_list)), printer=False)
#player_list generation
def get_plist(operator='', filt_value=0, backfill=False):
# a list of interesting players/player_id's that I want to generate shot charts for
csv_file = os.getcwd()+"/player_list.csv"
p_list = {}
with open(csv_file, 'rU') as f:
mycsv = csv.reader(f)
i = 0
for row in mycsv:
if i == 0:
i += 1
continue
else:
i += 1
player_title, player_id, start_year, end_year = row
player_search_name = player_title.replace(" ","_")
# Charts for only new players (only for backfilling)
if backfill is True:
if os.path.exists(os.getcwd()+'/shot_charts/'+player_search_name):
continue
# If a player doesn't have a start_year or end_year, we set those to the max values
if start_year == '':
start_year = 0
if end_year == '':
end_year = 9999
# a filter for which players to update
if operator == '':
p_list[player_title]=[int(player_id), int(start_year), int(end_year)]
else:
if operator == '>=':
if int(end_year) >= filt_value:
p_list[player_title]=[int(player_id), int(start_year), int(end_year)]
elif operator == '<=':
if int(end_year) <= filt_value:
p_list[player_title]=[int(player_id), int(start_year), int(end_year)]
else:
print 'unknown operator, using =='
if int(end_year) == filt_value:
p_list[player_title]=[int(player_id), int(start_year), int(end_year)]
return p_list
#Getting the player picture that we will later place in the chart
#Most of this code was recycled from Savvas Tjortjoglou [http://savvastjortjoglou.com]
def acquire_playerPic(player_id, zoom, offset=(250,370)):
from matplotlib import offsetbox as osb
import urllib
pic = urllib.urlretrieve("http://stats.nba.com/media/players/230x185/"+str(player_id)+".png",str(player_id)+".png")
player_pic = plt.imread(pic[0])
img = osb.OffsetImage(player_pic, zoom)
img = osb.AnnotationBbox(img, offset,xycoords='data',pad=0.0, box_alignment=(1,0), frameon=False)
return img
if __name__ == "__main__":
parser = argparse.ArgumentParser()
# call via [python nba_shot_charts.py --player_name "Zach Randolph"]
parser.add_argument('--player_name',type=str, default='')
args = parser.parse_args()
if args.player_name != '':
p_list = get_plist()
vals = p_list.get(args.player_name)
if vals is None:
sys.exit('Need a valid player_id')
player_list = {args.player_name:vals}
else:
# If we don't have a name, we assume we're trying to backfill
player_list = get_plist(operator='<=', filt_value=9999, backfill=True)
if len(player_list) == 1:
print "\nBegin processing " + str(len(player_list)) + " player\n"
else:
print "\nBegin processing " + str(len(player_list)) + " players\n"
initiate(player_list, str(len(player_list)))
| 47.625678
| 480
| 0.650492
|
dc0ae27970486fbfff44cbccb7c753af0fd1f9a6
| 35,565
|
py
|
Python
|
fastfiz/fz.py
|
sharmapulkit/roNNie-Pool
|
84f511236bc01d80983fcbabba896f065a658028
|
[
"Apache-2.0"
] | 2
|
2017-10-03T19:41:46.000Z
|
2020-03-01T17:39:04.000Z
|
fastfiz/fz.py
|
sharmapulkit/roNNie-pool
|
84f511236bc01d80983fcbabba896f065a658028
|
[
"Apache-2.0"
] | null | null | null |
fastfiz/fz.py
|
sharmapulkit/roNNie-pool
|
84f511236bc01d80983fcbabba896f065a658028
|
[
"Apache-2.0"
] | 2
|
2018-08-07T05:02:09.000Z
|
2019-07-31T20:40:25.000Z
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_fz')).lstrip('.')
try:
return importlib.import_module(mname)
except ImportError:
return importlib.import_module('_fz')
_fz = swig_import_helper()
del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_fz', [dirname(__file__)])
except ImportError:
import _fz
return _fz
try:
_mod = imp.load_module('_fz', fp, pathname, description)
finally:
if fp is not None:
fp.close()
return _mod
_fz = swig_import_helper()
del swig_import_helper
else:
import _fz
del _swig_python_version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except __builtin__.Exception:
class _object:
pass
_newclass = 0
class ShotParams(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, ShotParams, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, ShotParams, name)
__repr__ = _swig_repr
__swig_setmethods__["a"] = _fz.ShotParams_a_set
__swig_getmethods__["a"] = _fz.ShotParams_a_get
if _newclass:
a = _swig_property(_fz.ShotParams_a_get, _fz.ShotParams_a_set)
__swig_setmethods__["b"] = _fz.ShotParams_b_set
__swig_getmethods__["b"] = _fz.ShotParams_b_get
if _newclass:
b = _swig_property(_fz.ShotParams_b_get, _fz.ShotParams_b_set)
__swig_setmethods__["theta"] = _fz.ShotParams_theta_set
__swig_getmethods__["theta"] = _fz.ShotParams_theta_get
if _newclass:
theta = _swig_property(_fz.ShotParams_theta_get, _fz.ShotParams_theta_set)
__swig_setmethods__["phi"] = _fz.ShotParams_phi_set
__swig_getmethods__["phi"] = _fz.ShotParams_phi_get
if _newclass:
phi = _swig_property(_fz.ShotParams_phi_get, _fz.ShotParams_phi_set)
__swig_setmethods__["v"] = _fz.ShotParams_v_set
__swig_getmethods__["v"] = _fz.ShotParams_v_get
if _newclass:
v = _swig_property(_fz.ShotParams_v_get, _fz.ShotParams_v_set)
def __init__(self, *args):
this = _fz.new_ShotParams(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _fz.delete_ShotParams
__del__ = lambda self: None
ShotParams_swigregister = _fz.ShotParams_swigregister
ShotParams_swigregister(ShotParams)
class Point(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Point, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Point, name)
__repr__ = _swig_repr
__swig_setmethods__["x"] = _fz.Point_x_set
__swig_getmethods__["x"] = _fz.Point_x_get
if _newclass:
x = _swig_property(_fz.Point_x_get, _fz.Point_x_set)
__swig_setmethods__["y"] = _fz.Point_y_set
__swig_getmethods__["y"] = _fz.Point_y_get
if _newclass:
y = _swig_property(_fz.Point_y_get, _fz.Point_y_set)
def __init__(self, *args):
this = _fz.new_Point(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def toString(self):
return _fz.Point_toString(self)
def fromString(self, s):
return _fz.Point_fromString(self, s)
__swig_destroy__ = _fz.delete_Point
__del__ = lambda self: None
Point_swigregister = _fz.Point_swigregister
Point_swigregister(Point)
class Vector(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Vector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Vector, name)
__repr__ = _swig_repr
__swig_setmethods__["x"] = _fz.Vector_x_set
__swig_getmethods__["x"] = _fz.Vector_x_get
if _newclass:
x = _swig_property(_fz.Vector_x_get, _fz.Vector_x_set)
__swig_setmethods__["y"] = _fz.Vector_y_set
__swig_getmethods__["y"] = _fz.Vector_y_get
if _newclass:
y = _swig_property(_fz.Vector_y_get, _fz.Vector_y_set)
__swig_setmethods__["z"] = _fz.Vector_z_set
__swig_getmethods__["z"] = _fz.Vector_z_get
if _newclass:
z = _swig_property(_fz.Vector_z_get, _fz.Vector_z_set)
def __init__(self, *args):
this = _fz.new_Vector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def toString(self):
return _fz.Vector_toString(self)
def fromString(self, s):
return _fz.Vector_fromString(self, s)
__swig_destroy__ = _fz.delete_Vector
__del__ = lambda self: None
Vector_swigregister = _fz.Vector_swigregister
Vector_swigregister(Vector)
class Ball(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Ball, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Ball, name)
__repr__ = _swig_repr
NOTINPLAY = _fz.Ball_NOTINPLAY
STATIONARY = _fz.Ball_STATIONARY
SPINNING = _fz.Ball_SPINNING
SLIDING = _fz.Ball_SLIDING
ROLLING = _fz.Ball_ROLLING
POCKETED_SW = _fz.Ball_POCKETED_SW
POCKETED_W = _fz.Ball_POCKETED_W
POCKETED_NW = _fz.Ball_POCKETED_NW
POCKETED_NE = _fz.Ball_POCKETED_NE
POCKETED_E = _fz.Ball_POCKETED_E
POCKETED_SE = _fz.Ball_POCKETED_SE
SLIDING_SPINNING = _fz.Ball_SLIDING_SPINNING
ROLLING_SPINNING = _fz.Ball_ROLLING_SPINNING
UNKNOWN_STATE = _fz.Ball_UNKNOWN_STATE
CUE = _fz.Ball_CUE
ONE = _fz.Ball_ONE
TWO = _fz.Ball_TWO
THREE = _fz.Ball_THREE
FOUR = _fz.Ball_FOUR
FIVE = _fz.Ball_FIVE
SIX = _fz.Ball_SIX
SEVEN = _fz.Ball_SEVEN
EIGHT = _fz.Ball_EIGHT
NINE = _fz.Ball_NINE
TEN = _fz.Ball_TEN
ELEVEN = _fz.Ball_ELEVEN
TWELVE = _fz.Ball_TWELVE
THIRTEEN = _fz.Ball_THIRTEEN
FOURTEEN = _fz.Ball_FOURTEEN
FIFTEEN = _fz.Ball_FIFTEEN
UNKNOWN_ID = _fz.Ball_UNKNOWN_ID
def __init__(self, *args):
this = _fz.new_Ball(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getRadius(self):
return _fz.Ball_getRadius(self)
def getID(self):
return _fz.Ball_getID(self)
def getIDString(self):
return _fz.Ball_getIDString(self)
def getState(self):
return _fz.Ball_getState(self)
def getStateString(self):
return _fz.Ball_getStateString(self)
def getPos(self):
return _fz.Ball_getPos(self)
def getVelocity(self):
return _fz.Ball_getVelocity(self)
def getSpin(self):
return _fz.Ball_getSpin(self)
def setID(self, t):
return _fz.Ball_setID(self, t)
def setPos(self, pos):
return _fz.Ball_setPos(self, pos)
def setVelocity(self, vel):
return _fz.Ball_setVelocity(self, vel)
def setSpin(self, spin):
return _fz.Ball_setSpin(self, spin)
def setState(self, s):
return _fz.Ball_setState(self, s)
def isInPlay(self):
return _fz.Ball_isInPlay(self)
def isPocketed(self):
return _fz.Ball_isPocketed(self)
def updateState(self, VERBOSE=False):
return _fz.Ball_updateState(self, VERBOSE)
def toString(self):
return _fz.Ball_toString(self)
def fromString(self, s):
return _fz.Ball_fromString(self, s)
__swig_destroy__ = _fz.delete_Ball
__del__ = lambda self: None
Ball_swigregister = _fz.Ball_swigregister
Ball_swigregister(Ball)
class Table(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Table, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Table, name)
__repr__ = _swig_repr
g = _fz.Table_g
MU_SLIDING = _fz.Table_MU_SLIDING
MU_ROLLING = _fz.Table_MU_ROLLING
MU_SPINNING = _fz.Table_MU_SPINNING
TABLE_LENGTH = _fz.Table_TABLE_LENGTH
TABLE_WIDTH = _fz.Table_TABLE_WIDTH
CORNER_POCKET_WIDTH = _fz.Table_CORNER_POCKET_WIDTH
SIDE_POCKET_WIDTH = _fz.Table_SIDE_POCKET_WIDTH
RAIL_HEIGHT = _fz.Table_RAIL_HEIGHT
CUE_LENGTH = _fz.Table_CUE_LENGTH
RAIL_VEL_DAMPING_X = _fz.Table_RAIL_VEL_DAMPING_X
RAIL_VEL_DAMPING_Y = _fz.Table_RAIL_VEL_DAMPING_Y
RAIL_SPIN_DAMPING = _fz.Table_RAIL_SPIN_DAMPING
RAIL_VEL_ANGLE_ADJ = _fz.Table_RAIL_VEL_ANGLE_ADJ
RAIL_ZSPIN_ANGLE_ADJ = _fz.Table_RAIL_ZSPIN_ANGLE_ADJ
CUE_MASS = _fz.Table_CUE_MASS
I = _fz.Table_I
SW_POCKET = _fz.Table_SW_POCKET
SW_RAIL = _fz.Table_SW_RAIL
W_POCKET = _fz.Table_W_POCKET
NW_RAIL = _fz.Table_NW_RAIL
NW_POCKET = _fz.Table_NW_POCKET
N_RAIL = _fz.Table_N_RAIL
NE_POCKET = _fz.Table_NE_POCKET
NE_RAIL = _fz.Table_NE_RAIL
E_POCKET = _fz.Table_E_POCKET
SE_RAIL = _fz.Table_SE_RAIL
SE_POCKET = _fz.Table_SE_POCKET
S_RAIL = _fz.Table_S_RAIL
UNKNOWN_BOUNDARY = _fz.Table_UNKNOWN_BOUNDARY
SW = _fz.Table_SW
W = _fz.Table_W
NW = _fz.Table_NW
NE = _fz.Table_NE
E = _fz.Table_E
SE = _fz.Table_SE
UNKNOWN_POCKET = _fz.Table_UNKNOWN_POCKET
def __init__(self, *args):
this = _fz.new_Table(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getLength(self):
return _fz.Table_getLength(self)
def getWidth(self):
return _fz.Table_getWidth(self)
def getHeadString(self):
return _fz.Table_getHeadString(self)
def getFootSpot(self):
return _fz.Table_getFootSpot(self)
def setCueLength(self, length):
return _fz.Table_setCueLength(self, length)
def getCueLength(self):
return _fz.Table_getCueLength(self)
def setRailHeight(self, height):
return _fz.Table_setRailHeight(self, height)
def getRailHeight(self):
return _fz.Table_getRailHeight(self)
def setMuSliding(self, mu):
return _fz.Table_setMuSliding(self, mu)
def getMuSliding(self):
return _fz.Table_getMuSliding(self)
def setMuRolling(self, mu):
return _fz.Table_setMuRolling(self, mu)
def getMuRolling(self):
return _fz.Table_getMuRolling(self)
def setMuSpinning(self, mu):
return _fz.Table_setMuSpinning(self, mu)
def getMuSpinning(self):
return _fz.Table_getMuSpinning(self)
def getPocketCenter(self, pocket):
return _fz.Table_getPocketCenter(self, pocket)
def getPocketRight(self, pocket):
return _fz.Table_getPocketRight(self, pocket)
def getPocketLeft(self, pocket):
return _fz.Table_getPocketLeft(self, pocket)
if _newclass:
defaultTable = staticmethod(_fz.Table_defaultTable)
else:
defaultTable = _fz.Table_defaultTable
if _newclass:
stateFromPocket = staticmethod(_fz.Table_stateFromPocket)
else:
stateFromPocket = _fz.Table_stateFromPocket
if _newclass:
pocketFromBndId = staticmethod(_fz.Table_pocketFromBndId)
else:
pocketFromBndId = _fz.Table_pocketFromBndId
if _newclass:
bndIdFromPocket = staticmethod(_fz.Table_bndIdFromPocket)
else:
bndIdFromPocket = _fz.Table_bndIdFromPocket
if _newclass:
boundaryName = staticmethod(_fz.Table_boundaryName)
else:
boundaryName = _fz.Table_boundaryName
if _newclass:
pocketName = staticmethod(_fz.Table_pocketName)
else:
pocketName = _fz.Table_pocketName
__swig_destroy__ = _fz.delete_Table
__del__ = lambda self: None
Table_swigregister = _fz.Table_swigregister
Table_swigregister(Table)
def Table_defaultTable():
return _fz.Table_defaultTable()
Table_defaultTable = _fz.Table_defaultTable
def Table_stateFromPocket(pocket):
return _fz.Table_stateFromPocket(pocket)
Table_stateFromPocket = _fz.Table_stateFromPocket
def Table_pocketFromBndId(bnd):
return _fz.Table_pocketFromBndId(bnd)
Table_pocketFromBndId = _fz.Table_pocketFromBndId
def Table_bndIdFromPocket(pocket):
return _fz.Table_bndIdFromPocket(pocket)
Table_bndIdFromPocket = _fz.Table_bndIdFromPocket
def Table_boundaryName(boundary):
return _fz.Table_boundaryName(boundary)
Table_boundaryName = _fz.Table_boundaryName
def Table_pocketName(pocket):
return _fz.Table_pocketName(pocket)
Table_pocketName = _fz.Table_pocketName
class Event(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Event, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Event, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
NO_EVENT = _fz.Event_NO_EVENT
STATE_CHANGE = _fz.Event_STATE_CHANGE
BALL_COLLISION = _fz.Event_BALL_COLLISION
RAIL_COLLISION = _fz.Event_RAIL_COLLISION
POCKETED = _fz.Event_POCKETED
CUE_STRIKE = _fz.Event_CUE_STRIKE
MISCUE = _fz.Event_MISCUE
UNKNOWN_EVENT = _fz.Event_UNKNOWN_EVENT
def getTime(self):
return _fz.Event_getTime(self)
def getBall1(self):
return _fz.Event_getBall1(self)
def getBall1Data(self):
return _fz.Event_getBall1Data(self)
def __lt__(self, other):
return _fz.Event___lt__(self, other)
if _newclass:
eventCmp = staticmethod(_fz.Event_eventCmp)
else:
eventCmp = _fz.Event_eventCmp
def toString(self):
return _fz.Event_toString(self)
def getType(self):
return _fz.Event_getType(self)
def getTypeString(self):
return _fz.Event_getTypeString(self)
def getBall2(self):
return _fz.Event_getBall2(self)
def getBall2Data(self):
return _fz.Event_getBall2Data(self)
def relatedTo(self, other):
return _fz.Event_relatedTo(self, other)
def involvesBall(self, b):
return _fz.Event_involvesBall(self, b)
__swig_destroy__ = _fz.delete_Event
__del__ = lambda self: None
def handle(self, ts, VERBOSE=False):
return _fz.Event_handle(self, ts, VERBOSE)
Event_swigregister = _fz.Event_swigregister
Event_swigregister(Event)
def Event_eventCmp(event1, event2):
return _fz.Event_eventCmp(event1, event2)
Event_eventCmp = _fz.Event_eventCmp
class StateChangeEvent(Event):
__swig_setmethods__ = {}
for _s in [Event]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, StateChangeEvent, name, value)
__swig_getmethods__ = {}
for _s in [Event]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, StateChangeEvent, name)
__repr__ = _swig_repr
def __init__(self, time, b):
this = _fz.new_StateChangeEvent(time, b)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getType(self):
return _fz.StateChangeEvent_getType(self)
def getTypeString(self):
return _fz.StateChangeEvent_getTypeString(self)
__swig_destroy__ = _fz.delete_StateChangeEvent
__del__ = lambda self: None
StateChangeEvent_swigregister = _fz.StateChangeEvent_swigregister
StateChangeEvent_swigregister(StateChangeEvent)
class BallCollisionEvent(Event):
__swig_setmethods__ = {}
for _s in [Event]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, BallCollisionEvent, name, value)
__swig_getmethods__ = {}
for _s in [Event]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, BallCollisionEvent, name)
__repr__ = _swig_repr
def __init__(self, time, b1, b2):
this = _fz.new_BallCollisionEvent(time, b1, b2)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getType(self):
return _fz.BallCollisionEvent_getType(self)
def getTypeString(self):
return _fz.BallCollisionEvent_getTypeString(self)
def relatedTo(self, other):
return _fz.BallCollisionEvent_relatedTo(self, other)
def involvesBall(self, b):
return _fz.BallCollisionEvent_involvesBall(self, b)
__swig_destroy__ = _fz.delete_BallCollisionEvent
__del__ = lambda self: None
def getBall2(self):
return _fz.BallCollisionEvent_getBall2(self)
def getBall2Data(self):
return _fz.BallCollisionEvent_getBall2Data(self)
BallCollisionEvent_swigregister = _fz.BallCollisionEvent_swigregister
BallCollisionEvent_swigregister(BallCollisionEvent)
class RailCollisionEvent(Event):
__swig_setmethods__ = {}
for _s in [Event]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, RailCollisionEvent, name, value)
__swig_getmethods__ = {}
for _s in [Event]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, RailCollisionEvent, name)
__repr__ = _swig_repr
def __init__(self, time, b, rail):
this = _fz.new_RailCollisionEvent(time, b, rail)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getType(self):
return _fz.RailCollisionEvent_getType(self)
def getTypeString(self):
return _fz.RailCollisionEvent_getTypeString(self)
def getRail(self):
return _fz.RailCollisionEvent_getRail(self)
__swig_destroy__ = _fz.delete_RailCollisionEvent
__del__ = lambda self: None
RailCollisionEvent_swigregister = _fz.RailCollisionEvent_swigregister
RailCollisionEvent_swigregister(RailCollisionEvent)
class PocketedEvent(Event):
__swig_setmethods__ = {}
for _s in [Event]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, PocketedEvent, name, value)
__swig_getmethods__ = {}
for _s in [Event]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, PocketedEvent, name)
__repr__ = _swig_repr
def __init__(self, time, b, pocket):
this = _fz.new_PocketedEvent(time, b, pocket)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getType(self):
return _fz.PocketedEvent_getType(self)
def getTypeString(self):
return _fz.PocketedEvent_getTypeString(self)
def getPocket(self):
return _fz.PocketedEvent_getPocket(self)
__swig_destroy__ = _fz.delete_PocketedEvent
__del__ = lambda self: None
PocketedEvent_swigregister = _fz.PocketedEvent_swigregister
PocketedEvent_swigregister(PocketedEvent)
class CueStrikeEvent(Event):
__swig_setmethods__ = {}
for _s in [Event]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, CueStrikeEvent, name, value)
__swig_getmethods__ = {}
for _s in [Event]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, CueStrikeEvent, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _fz.new_CueStrikeEvent(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getType(self):
return _fz.CueStrikeEvent_getType(self)
def getTypeString(self):
return _fz.CueStrikeEvent_getTypeString(self)
def getParams(self):
return _fz.CueStrikeEvent_getParams(self)
__swig_destroy__ = _fz.delete_CueStrikeEvent
__del__ = lambda self: None
CueStrikeEvent_swigregister = _fz.CueStrikeEvent_swigregister
CueStrikeEvent_swigregister(CueStrikeEvent)
class Shot(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Shot, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Shot, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
def getEventList(self):
return _fz.Shot_getEventList(self)
def getDuration(self):
return _fz.Shot_getDuration(self)
__swig_destroy__ = _fz.delete_Shot
__del__ = lambda self: None
Shot_swigregister = _fz.Shot_swigregister
Shot_swigregister(Shot)
class TableState(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, TableState, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, TableState, name)
__repr__ = _swig_repr
MAX_VELOCITY = _fz.TableState_MAX_VELOCITY
MIN_THETA = _fz.TableState_MIN_THETA
MAX_THETA = _fz.TableState_MAX_THETA
OK_PRECONDITION = _fz.TableState_OK_PRECONDITION
BAD_A_VAL = _fz.TableState_BAD_A_VAL
BAD_B_VAL = _fz.TableState_BAD_B_VAL
BAD_THETA_VAL = _fz.TableState_BAD_THETA_VAL
BAD_PHI_VAL = _fz.TableState_BAD_PHI_VAL
BAD_V_VAL = _fz.TableState_BAD_V_VAL
BAD_X_VAL = _fz.TableState_BAD_X_VAL
BAD_Y_VAL = _fz.TableState_BAD_Y_VAL
CUE_STICK_COLLISION = _fz.TableState_CUE_STICK_COLLISION
BALL_OVERLAP = _fz.TableState_BALL_OVERLAP
def __init__(self, *args):
this = _fz.new_TableState(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getNumBalls(self):
return _fz.TableState_getNumBalls(self)
def setBall(self, *args):
return _fz.TableState_setBall(self, *args)
def spotBall(self, *args):
return _fz.TableState_spotBall(self, *args)
def getBall(self, btype):
return _fz.TableState_getBall(self, btype)
def getTable(self):
return _fz.TableState_getTable(self)
def isValidBallPlacement(self, VERBOSE=False):
return _fz.TableState_isValidBallPlacement(self, VERBOSE)
def isPhysicallyPossible(self, shotParams, VERBOSE=False):
return _fz.TableState_isPhysicallyPossible(self, shotParams, VERBOSE)
def addNoise(self, dither):
return _fz.TableState_addNoise(self, dither)
def executeShot(self, sp, verbose=False, errors=False):
return _fz.TableState_executeShot(self, sp, verbose, errors)
def getFirstBallHit(self, sp):
return _fz.TableState_getFirstBallHit(self, sp)
def toString(self):
return _fz.TableState_toString(self)
def fromString(self, s):
return _fz.TableState_fromString(self, s)
__swig_destroy__ = _fz.delete_TableState
__del__ = lambda self: None
TableState_swigregister = _fz.TableState_swigregister
TableState_swigregister(TableState)
def getFastFizVersion():
return _fz.getFastFizVersion()
getFastFizVersion = _fz.getFastFizVersion
def getTestState():
return _fz.getTestState()
getTestState = _fz.getTestState
def getTestShotParams():
return _fz.getTestShotParams()
getTestShotParams = _fz.getTestShotParams
class SwigPyIterator(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _fz.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
return _fz.SwigPyIterator_value(self)
def incr(self, n=1):
return _fz.SwigPyIterator_incr(self, n)
def decr(self, n=1):
return _fz.SwigPyIterator_decr(self, n)
def distance(self, x):
return _fz.SwigPyIterator_distance(self, x)
def equal(self, x):
return _fz.SwigPyIterator_equal(self, x)
def copy(self):
return _fz.SwigPyIterator_copy(self)
def next(self):
return _fz.SwigPyIterator_next(self)
def __next__(self):
return _fz.SwigPyIterator___next__(self)
def previous(self):
return _fz.SwigPyIterator_previous(self)
def advance(self, n):
return _fz.SwigPyIterator_advance(self, n)
def __eq__(self, x):
return _fz.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
return _fz.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
return _fz.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
return _fz.SwigPyIterator___isub__(self, n)
def __add__(self, n):
return _fz.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
return _fz.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _fz.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
class EventVector(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, EventVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, EventVector, name)
__repr__ = _swig_repr
def iterator(self):
return _fz.EventVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
return _fz.EventVector___nonzero__(self)
def __bool__(self):
return _fz.EventVector___bool__(self)
def __len__(self):
return _fz.EventVector___len__(self)
def __getslice__(self, i, j):
return _fz.EventVector___getslice__(self, i, j)
def __setslice__(self, *args):
return _fz.EventVector___setslice__(self, *args)
def __delslice__(self, i, j):
return _fz.EventVector___delslice__(self, i, j)
def __delitem__(self, *args):
return _fz.EventVector___delitem__(self, *args)
def __getitem__(self, *args):
return _fz.EventVector___getitem__(self, *args)
def __setitem__(self, *args):
return _fz.EventVector___setitem__(self, *args)
def pop(self):
return _fz.EventVector_pop(self)
def append(self, x):
return _fz.EventVector_append(self, x)
def empty(self):
return _fz.EventVector_empty(self)
def size(self):
return _fz.EventVector_size(self)
def swap(self, v):
return _fz.EventVector_swap(self, v)
def begin(self):
return _fz.EventVector_begin(self)
def end(self):
return _fz.EventVector_end(self)
def rbegin(self):
return _fz.EventVector_rbegin(self)
def rend(self):
return _fz.EventVector_rend(self)
def clear(self):
return _fz.EventVector_clear(self)
def get_allocator(self):
return _fz.EventVector_get_allocator(self)
def pop_back(self):
return _fz.EventVector_pop_back(self)
def erase(self, *args):
return _fz.EventVector_erase(self, *args)
def __init__(self, *args):
this = _fz.new_EventVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
return _fz.EventVector_push_back(self, x)
def front(self):
return _fz.EventVector_front(self)
def back(self):
return _fz.EventVector_back(self)
def assign(self, n, x):
return _fz.EventVector_assign(self, n, x)
def resize(self, *args):
return _fz.EventVector_resize(self, *args)
def insert(self, *args):
return _fz.EventVector_insert(self, *args)
def reserve(self, n):
return _fz.EventVector_reserve(self, n)
def capacity(self):
return _fz.EventVector_capacity(self)
__swig_destroy__ = _fz.delete_EventVector
__del__ = lambda self: None
EventVector_swigregister = _fz.EventVector_swigregister
EventVector_swigregister(EventVector)
DEC_NO_DECISION = _fz.DEC_NO_DECISION
DEC_KEEP_SHOOTING = _fz.DEC_KEEP_SHOOTING
DEC_RERACK = _fz.DEC_RERACK
DEC_EIGHTBALL_RERACK_OPP_SHOOT = _fz.DEC_EIGHTBALL_RERACK_OPP_SHOOT
DEC_CONCEDE = _fz.DEC_CONCEDE
DEC_NINEBALL_PUSH_OUT = _fz.DEC_NINEBALL_PUSH_OUT
TT_NORMAL = _fz.TT_NORMAL
TT_BALL_IN_HAND = _fz.TT_BALL_IN_HAND
TT_BEHIND_LINE = _fz.TT_BEHIND_LINE
TT_RESERVED = _fz.TT_RESERVED
TT_BREAK = _fz.TT_BREAK
TT_WIN = _fz.TT_WIN
TT_EIGHTBALL_FOUL_ON_BREAK = _fz.TT_EIGHTBALL_FOUL_ON_BREAK
TT_EIGHTBALL_8BALL_POCKETED_ON_BREAK = _fz.TT_EIGHTBALL_8BALL_POCKETED_ON_BREAK
TT_NINEBALL_FIRST_SHOT = _fz.TT_NINEBALL_FIRST_SHOT
TT_NINEBALL_PUSH_OUT = _fz.TT_NINEBALL_PUSH_OUT
def __rshift__(arg1, tt):
return _fz.__rshift__(arg1, tt)
__rshift__ = _fz.__rshift__
GT_NONE = _fz.GT_NONE
GT_EIGHTBALL = _fz.GT_EIGHTBALL
GT_NINEBALL = _fz.GT_NINEBALL
GT_SNOOKER = _fz.GT_SNOOKER
GT_ONEPOCKET = _fz.GT_ONEPOCKET
SR_OK = _fz.SR_OK
SR_OK_LOST_TURN = _fz.SR_OK_LOST_TURN
SR_BAD_PARAMS = _fz.SR_BAD_PARAMS
SR_SHOT_IMPOSSIBLE = _fz.SR_SHOT_IMPOSSIBLE
SR_TIMEOUT = _fz.SR_TIMEOUT
class GameShot(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GameShot, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GameShot, name)
__repr__ = _swig_repr
__swig_setmethods__["params"] = _fz.GameShot_params_set
__swig_getmethods__["params"] = _fz.GameShot_params_get
if _newclass:
params = _swig_property(_fz.GameShot_params_get, _fz.GameShot_params_set)
__swig_setmethods__["cue_x"] = _fz.GameShot_cue_x_set
__swig_getmethods__["cue_x"] = _fz.GameShot_cue_x_get
if _newclass:
cue_x = _swig_property(_fz.GameShot_cue_x_get, _fz.GameShot_cue_x_set)
__swig_setmethods__["cue_y"] = _fz.GameShot_cue_y_set
__swig_getmethods__["cue_y"] = _fz.GameShot_cue_y_get
if _newclass:
cue_y = _swig_property(_fz.GameShot_cue_y_get, _fz.GameShot_cue_y_set)
__swig_setmethods__["ball"] = _fz.GameShot_ball_set
__swig_getmethods__["ball"] = _fz.GameShot_ball_get
if _newclass:
ball = _swig_property(_fz.GameShot_ball_get, _fz.GameShot_ball_set)
__swig_setmethods__["pocket"] = _fz.GameShot_pocket_set
__swig_getmethods__["pocket"] = _fz.GameShot_pocket_get
if _newclass:
pocket = _swig_property(_fz.GameShot_pocket_get, _fz.GameShot_pocket_set)
__swig_setmethods__["decision"] = _fz.GameShot_decision_set
__swig_getmethods__["decision"] = _fz.GameShot_decision_get
if _newclass:
decision = _swig_property(_fz.GameShot_decision_get, _fz.GameShot_decision_set)
__swig_setmethods__["timeSpent"] = _fz.GameShot_timeSpent_set
__swig_getmethods__["timeSpent"] = _fz.GameShot_timeSpent_get
if _newclass:
timeSpent = _swig_property(_fz.GameShot_timeSpent_get, _fz.GameShot_timeSpent_set)
def __init__(self):
this = _fz.new_GameShot()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _fz.delete_GameShot
__del__ = lambda self: None
GameShot_swigregister = _fz.GameShot_swigregister
GameShot_swigregister(GameShot)
class GameState(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GameState, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GameState, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
if _newclass:
Factory = staticmethod(_fz.GameState_Factory)
else:
Factory = _fz.GameState_Factory
if _newclass:
RackedState = staticmethod(_fz.GameState_RackedState)
else:
RackedState = _fz.GameState_RackedState
def toString(self):
return _fz.GameState_toString(self)
def gameType(self):
return _fz.GameState_gameType(self)
def isOpenTable(self):
return _fz.GameState_isOpenTable(self)
def getTurnType(self):
return _fz.GameState_getTurnType(self)
def playingSolids(self):
return _fz.GameState_playingSolids(self)
def curPlayerStarted(self):
return _fz.GameState_curPlayerStarted(self)
def timeLeft(self):
return _fz.GameState_timeLeft(self)
def timeLeftOpp(self):
return _fz.GameState_timeLeftOpp(self)
def tableState(self):
return _fz.GameState_tableState(self)
def executeShot(self, shot, shotObj=None):
return _fz.GameState_executeShot(self, shot, shotObj)
__swig_destroy__ = _fz.delete_GameState
__del__ = lambda self: None
GameState_swigregister = _fz.GameState_swigregister
GameState_swigregister(GameState)
def GameState_Factory(*args):
return _fz.GameState_Factory(*args)
GameState_Factory = _fz.GameState_Factory
def GameState_RackedState(gameType):
return _fz.GameState_RackedState(gameType)
GameState_RackedState = _fz.GameState_RackedState
def getRulesVersion():
return _fz.getRulesVersion()
getRulesVersion = _fz.getRulesVersion
# This file is compatible with both classic and new-style classes.
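# --- Usage sketch (editor's addition, not part of the SWIG-generated code) --
# Everything referenced below (GameState.RackedState, GT_EIGHTBALL, GameShot
# and its fields, executeShot and the SR_* status codes) is defined earlier in
# this module.  Filling in real shot physics would need the ShotParams class
# generated elsewhere in this file, so this is only a sketch of the call flow.
if __name__ == "__main__":
    state = GameState.RackedState(GT_EIGHTBALL)   # freshly racked 8-ball game
    print(state.toString())
    shot = GameShot()
    shot.cue_x, shot.cue_y = 0.0, 0.0             # only meaningful on ball-in-hand turns
    shot.decision = DEC_NO_DECISION
    # shot.params = ShotParams(...)               # assumed: cue velocity/angle set here
    result = state.executeShot(shot)              # returns one of the SR_* codes
    print("shot result:", result, "turn type:", state.getTurnType())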
| 32.127371
| 96
| 0.707325
|
4deadb760be3d94df89ca26714f9150629f72a23
| 21
|
py
|
Python
|
example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Logarithms and exponentials/logaddexp2 Logarithm of the sum of exponentiations of the inputs in base-2.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Logarithms and exponentials/logaddexp2 Logarithm of the sum of exponentiations of the inputs in base-2.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Logarithms and exponentials/logaddexp2 Logarithm of the sum of exponentiations of the inputs in base-2.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | 1
|
2021-02-04T04:51:48.000Z
|
2021-02-04T04:51:48.000Z
|
np.logaddexp2(x1, x2)
| 21
| 21
| 0.761905
|
6f15433dd9e6076ad6befd734780764bd55c10f2
| 7,540
|
py
|
Python
|
animation/creation.py
|
kiranajij/manim
|
3f95435b486d874b7b86476dc6293618ed08dab2
|
[
"MIT"
] | null | null | null |
animation/creation.py
|
kiranajij/manim
|
3f95435b486d874b7b86476dc6293618ed08dab2
|
[
"MIT"
] | null | null | null |
animation/creation.py
|
kiranajij/manim
|
3f95435b486d874b7b86476dc6293618ed08dab2
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
import numpy as np
from constants import *
from animation.animation import Animation
from mobject.svg.tex_mobject import TextMobject
from mobject.types.vectorized_mobject import VMobject
from mobject.types.vectorized_mobject import VectorizedPoint
from animation.transform import Transform
from utils.bezier import interpolate
from utils.config_ops import digest_config
from utils.paths import counterclockwise_path
from utils.rate_functions import double_smooth
from utils.rate_functions import smooth
# Drawing
class ShowPartial(Animation):
def update_submobject(self, submobject, starting_submobject, alpha):
submobject.pointwise_become_partial(
starting_submobject, *self.get_bounds(alpha)
)
def get_bounds(self, alpha):
raise Exception("Not Implemented")
class ShowCreation(ShowPartial):
CONFIG = {
"submobject_mode": "one_at_a_time",
}
def get_bounds(self, alpha):
return (0, alpha)
class Uncreate(ShowCreation):
CONFIG = {
"rate_func": lambda t: smooth(1 - t),
"remover": True
}
class Write(ShowCreation):
CONFIG = {
"rate_func": None,
"submobject_mode": "lagged_start",
}
def __init__(self, mob_or_text, **kwargs):
digest_config(self, kwargs)
if isinstance(mob_or_text, str):
mobject = TextMobject(mob_or_text)
else:
mobject = mob_or_text
if "run_time" not in kwargs:
self.establish_run_time(mobject)
if "lag_factor" not in kwargs:
if len(mobject.family_members_with_points()) < 4:
min_lag_factor = 1
else:
min_lag_factor = 2
self.lag_factor = max(self.run_time - 1, min_lag_factor)
ShowCreation.__init__(self, mobject, **kwargs)
def establish_run_time(self, mobject):
num_subs = len(mobject.family_members_with_points())
if num_subs < 15:
self.run_time = 1
else:
self.run_time = 2
class DrawBorderThenFill(Animation):
CONFIG = {
"run_time": 2,
"stroke_width": 2,
"stroke_color": None,
"rate_func": double_smooth,
}
def __init__(self, vmobject, **kwargs):
if not isinstance(vmobject, VMobject):
raise Exception("DrawBorderThenFill only works for VMobjects")
self.reached_halfway_point_before = False
Animation.__init__(self, vmobject, **kwargs)
def update_submobject(self, submobject, starting_submobject, alpha):
submobject.pointwise_become_partial(
starting_submobject, 0, min(2 * alpha, 1)
)
if alpha < 0.5:
if self.stroke_color:
color = self.stroke_color
elif starting_submobject.stroke_width > 0:
color = starting_submobject.get_stroke_color()
else:
color = starting_submobject.get_color()
submobject.set_stroke(color, width=self.stroke_width)
submobject.set_fill(opacity=0)
else:
if not self.reached_halfway_point_before:
self.reached_halfway_point_before = True
submobject.points = np.array(starting_submobject.points)
width, opacity = [
interpolate(start, end, 2 * alpha - 1)
for start, end in [
(self.stroke_width, starting_submobject.get_stroke_width()),
(0, starting_submobject.get_fill_opacity())
]
]
submobject.set_stroke(width=width)
submobject.set_fill(opacity=opacity)
# Fading
class FadeOut(Transform):
CONFIG = {
"remover": True,
}
def __init__(self, mobject, **kwargs):
target = mobject.copy()
target.fade(1)
Transform.__init__(self, mobject, target, **kwargs)
def clean_up(self, surrounding_scene=None):
Transform.clean_up(self, surrounding_scene)
self.update(0)
class FadeIn(Transform):
def __init__(self, mobject, **kwargs):
target = mobject.copy()
Transform.__init__(self, mobject, target, **kwargs)
self.starting_mobject.fade(1)
if isinstance(self.starting_mobject, VMobject):
self.starting_mobject.set_stroke(width=0)
self.starting_mobject.set_fill(opacity=0)
class FadeInAndShiftFromDirection(Transform):
CONFIG = {
"direction": DOWN,
}
def __init__(self, mobject, direction=None, **kwargs):
digest_config(self, kwargs)
target = mobject.copy()
if direction is None:
direction = self.direction
mobject.shift(direction)
mobject.fade(1)
Transform.__init__(self, mobject, target, **kwargs)
class FadeInFromDown(FadeInAndShiftFromDirection):
"""
Essential a more convenient form of FadeInAndShiftFromDirection
"""
CONFIG = {
"direction": DOWN,
}
class FadeOutAndShift(FadeOut):
CONFIG = {
"direction": DOWN,
}
def __init__(self, mobject, direction=None, **kwargs):
FadeOut.__init__(self, mobject, **kwargs)
if direction is None:
direction = self.direction
self.target_mobject.shift(direction)
class FadeOutAndShiftDown(FadeOutAndShift):
CONFIG = {
"direction": DOWN,
}
class VFadeIn(Animation):
"""
VFadeIn and VFadeOut only work for VMobjects, but they can be applied
to mobjects while they are being animated in some other way (e.g. shifting
then) in a way that does not work with FadeIn and FadeOut
"""
def update_submobject(self, submobject, starting_submobject, alpha):
submobject.set_stroke(
width=interpolate(0, starting_submobject.get_stroke_width(), alpha)
)
submobject.set_fill(
opacity=interpolate(0, starting_submobject.get_fill_opacity(), alpha)
)
class VFadeOut(VFadeIn):
CONFIG = {
"remover": True
}
def update_submobject(self, submobject, starting_submobject, alpha):
VFadeIn.update_submobject(
self, submobject, starting_submobject, 1 - alpha
)
# Growing
class GrowFromPoint(Transform):
CONFIG = {
"point_color": None,
}
def __init__(self, mobject, point, **kwargs):
digest_config(self, kwargs)
target = mobject.copy()
point_mob = VectorizedPoint(point)
if self.point_color:
point_mob.set_color(self.point_color)
mobject.replace(point_mob)
mobject.set_color(point_mob.get_color())
Transform.__init__(self, mobject, target, **kwargs)
class GrowFromCenter(GrowFromPoint):
def __init__(self, mobject, **kwargs):
GrowFromPoint.__init__(self, mobject, mobject.get_center(), **kwargs)
class GrowFromEdge(GrowFromPoint):
def __init__(self, mobject, edge, **kwargs):
GrowFromPoint.__init__(
self, mobject, mobject.get_critical_point(edge), **kwargs
)
class GrowArrow(GrowFromPoint):
def __init__(self, arrow, **kwargs):
GrowFromPoint.__init__(self, arrow, arrow.get_start(), **kwargs)
class SpinInFromNothing(GrowFromCenter):
CONFIG = {
"path_func": counterclockwise_path()
}
class ShrinkToCenter(Transform):
def __init__(self, mobject, **kwargs):
Transform.__init__(
self, mobject, mobject.get_point_mobject(), **kwargs
)
| 28.888889
| 81
| 0.643236
|
f37a1a60a0ccfb59acce6f2c9645402234825282
| 3,481
|
py
|
Python
|
inventory/inventory/report/pending_order_teralokasi/pending_order_teralokasi.py
|
riconova92/inventory
|
7cc4f49bda31f802af36ee4ea6eb43092b5094a7
|
[
"MIT"
] | null | null | null |
inventory/inventory/report/pending_order_teralokasi/pending_order_teralokasi.py
|
riconova92/inventory
|
7cc4f49bda31f802af36ee4ea6eb43092b5094a7
|
[
"MIT"
] | null | null | null |
inventory/inventory/report/pending_order_teralokasi/pending_order_teralokasi.py
|
riconova92/inventory
|
7cc4f49bda31f802af36ee4ea6eb43092b5094a7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2013, Myme and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
def execute(filters=None):
columns, data = [], []
columns = []
select_field = ""
group_clause = ""
order_clause = ""
left_join = ""
if filters.get("group_by") == "Customer" :
columns = ["Customer:Link/Customer:100","Item Code:Link/Item:100","Colour:Data:100","Qty Pending Order:Float:150","Qty Sisa di Pending Order:Float:200",
"Qty Teralokasi:Float:150"]
select_field = " po.`customer`,por.`item_code_roll`,por.`colour`,por.`roll_qty`,por.`qty_sisa`,por.`qty_dialokasi` "
order_clause = " ORDER BY po.`customer` "
elif filters.get("group_by") == "Item" :
columns = ["Item Code:Link/Item:100","Colour:Data:100","Qty Pending Order:Float:150","Qty Sisa di Pending Order:Float:200",
"Qty Teralokasi:Float:150"]
select_field = " por.`item_code_roll`,por.`colour`,por.`roll_qty`,por.`qty_sisa`,por.`qty_dialokasi` "
order_clause = " ORDER BY por.`item_code_roll` "
elif filters.get("group_by") == "Colour":
columns = ["Colour:Data:100","Item Code:Link/Item:100","Qty Pending Order:Float:150","Qty Sisa di Pending Order:Float:200",
"Qty Teralokasi:Float:150"]
select_field = " por.`colour`,por.`item_code_roll`,por.`roll_qty`,por.`qty_sisa`,por.`qty_dialokasi` "
order_clause = " ORDER BY por.`colour` "
elif filters.get("group_by") == "Pending Order" :
columns = ["Pending Order No.:Link/Pending Order:100","Item Code:Link/Item:100","Colour:Data:100","Qty Pending Order:Float:150","Qty Sisa di Pending Order:Float:200",
"Alokasi No.:Link/Alokasi Barang:100","Qty Alokasi:Float:100"]
select_field = " po.`name`,por.`item_code_roll`,por.`colour`,por.`roll_qty`,por.`qty_sisa`,ab.`name`,abd.`roll_qty` "
left_join = """ LEFT JOIN `tabAlokasi Barang`ab ON ab.`pending_order`=po.`name` AND ab.`docstatus`=1
LEFT JOIN `tabAlokasi Barang Data`abd ON ab.`name`=abd.`parent`
AND abd.`item_code_roll`=por.`item_code_roll` AND abd.`colour`=por.`colour` """
order_clause = " ORDER BY po.`name` "
else :
return [],[]
po_clause = ""
if filters.get("pending_order") :
po_clause = """ AND po.`name`="{0}" """.format(filters.get("pending_order"))
item_clause = ""
if filters.get("item") :
item_clause = """ AND por.`item_code_roll`="{0}" """.format(filters.get("item"))
customer_clause = ""
if filters.get("customer") :
customer_clause = """ AND po.`customer`="{0}" """.format(filters.get("customer"))
colour_clause = ""
if filters.get("colour") :
colour_clause = """ AND por.`colour`="{0}" """.format(filters.get("colour"))
delivery_clause = ""
if filters.get("delivery_from_date") and filters.get("delivery_to_date"):
delivery_clause = """ AND po.`expected_delivery_date` BETWEEN "{0}" AND "{1}" """.format(filters.get("delivery_from_date"),filters.get("delivery_to_date"))
date_clause = ""
if filters.get("posting_from_date") and filters.get("posting_to_date"):
delivery_clause = """ AND po.`posting_date` BETWEEN "{0}" AND "{1}" """.format(filters.get("posting_from_date"),filters.get("posting_to_date"))
data = frappe.db.sql(""" SELECT {0}
FROM `tabPending Order`po
JOIN `tabPending Order Roll`por ON por.`parent`=po.`name`
{1}
WHERE po.`docstatus`=1
{2} {3} {4} {5} {6} {7}
{8} """.format(select_field,left_join,po_clause,item_clause,customer_clause,colour_clause,delivery_clause,date_clause,order_clause))
return columns, data
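# --- Example invocation (editor's sketch, not part of the report) -----------
# Frappe normally calls execute() through its query-report framework; the
# direct call below only illustrates the filter keys this report understands.
# The customer name and dates are hypothetical, and a bench/site context with
# frappe.db is required for the query to actually run.
if __name__ == "__main__":
    example_filters = {
        "group_by": "Customer",          # Customer, Item, Colour or Pending Order
        "customer": "CUST-0001",         # hypothetical customer
        "delivery_from_date": "2020-01-01",
        "delivery_to_date": "2020-12-31",
    }
    columns, data = execute(example_filters)
    print(columns)
    print(data)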
| 46.413333
| 168
| 0.691468
|
0bac2af616e58f8a8bf30007437aa5a408bee54a
| 11,266
|
py
|
Python
|
src/pybel/struct/pipeline/pipeline.py
|
tehw0lf/pybel
|
6f67f8cce15052cc3c42ef87374e3b9ee45e6519
|
[
"Apache-2.0"
] | null | null | null |
src/pybel/struct/pipeline/pipeline.py
|
tehw0lf/pybel
|
6f67f8cce15052cc3c42ef87374e3b9ee45e6519
|
[
"Apache-2.0"
] | null | null | null |
src/pybel/struct/pipeline/pipeline.py
|
tehw0lf/pybel
|
6f67f8cce15052cc3c42ef87374e3b9ee45e6519
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""This module holds the Pipeline class."""
import json
import logging
import types
from functools import wraps
from .decorators import get_transformation, in_place_map, mapped, universe_map
from .exc import MetaValueError, MissingPipelineFunctionError, MissingUniverseError
from ..operations import node_intersection, union
__all__ = [
'Pipeline',
]
log = logging.getLogger(__name__)
META_UNION = 'union'
META_INTERSECTION = 'intersection'
def _get_protocol_tuple(data):
"""Convert a dictionary to a tuple.
:param dict data:
:rtype: tuple[str,list,dict]
"""
return data['function'], data.get('args', []), data.get('kwargs', {})
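# Example (editor's addition): a protocol is a plain list of such dictionaries,
# e.g. [{'function': 'remove_associations'},
#       {'function': 'some_transformation', 'args': [1], 'kwargs': {'k': 2}}]
# ('some_transformation' is an illustrative name only), so
# _get_protocol_tuple({'function': 'remove_associations'}) returns
# ('remove_associations', [], {}).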
class Pipeline:
"""Builds and runs analytical pipelines on BEL graphs.
Example usage:
>>> from pybel import BELGraph
>>> from pybel.struct.pipeline import Pipeline
>>> from pybel.struct.mutation import enrich_protein_and_rna_origins, prune_protein_rna_origins
>>> graph = BELGraph()
>>> example = Pipeline()
>>> example.append(enrich_protein_and_rna_origins)
>>> example.append(prune_protein_rna_origins)
>>> result = example.run(graph)
"""
def __init__(self, protocol=None):
"""
:param iter[dict] protocol: An iterable of dictionaries describing how to transform a network
"""
self.universe = None
self.protocol = protocol or []
def __len__(self):
return len(self.protocol)
def __iter__(self):
return iter(self.protocol)
@staticmethod
def from_functions(functions):
"""Build a pipeline from a list of functions.
:param functions: A list of functions or names of functions
:type functions: iter[((pybel.BELGraph) -> pybel.BELGraph) or ((pybel.BELGraph) -> None) or str]
:rtype: Pipeline
Example with function:
>>> from pybel.struct.pipeline import Pipeline
>>> from pybel.struct.mutation import remove_associations
>>> pipeline = Pipeline.from_functions([remove_associations])
Equivalent example with function names:
>>> from pybel.struct.pipeline import Pipeline
>>> pipeline = Pipeline.from_functions(['remove_associations'])
Lookup by name is possible for built in functions, and those that have been registered correctly using one of
the four decorators:
1. :func:`pybel.struct.pipeline.transformation`,
2. :func:`pybel.struct.pipeline.in_place_transformation`,
3. :func:`pybel.struct.pipeline.uni_transformation`,
4. :func:`pybel.struct.pipeline.uni_in_place_transformation`,
"""
result = Pipeline()
for func in functions:
result.append(func)
return result
def _get_function(self, name):
"""Wrap a function with the universe and in-place.
:param str name: The name of the function
:rtype: types.FunctionType
:raises MissingPipelineFunctionError: If the functions is not registered
"""
f = mapped.get(name)
if f is None:
raise MissingPipelineFunctionError('{} is not registered as a pipeline function'.format(name))
if name in universe_map and name in in_place_map:
return self._wrap_in_place(self._wrap_universe(f))
if name in universe_map:
return self._wrap_universe(f)
if name in in_place_map:
return self._wrap_in_place(f)
return f
def append(self, name, *args, **kwargs):
"""Add a function (either as a reference, or by name) and arguments to the pipeline.
:param name: The name of the function
:type name: str or (pybel.BELGraph -> pybel.BELGraph)
:param args: The positional arguments to call in the function
:param kwargs: The keyword arguments to call in the function
:return: This pipeline for fluid query building
:rtype: Pipeline
:raises MissingPipelineFunctionError: If the function is not registered
"""
if isinstance(name, types.FunctionType):
return self.append(name.__name__, *args, **kwargs)
elif isinstance(name, str):
get_transformation(name)
else:
raise TypeError('invalid function argument: {}'.format(name))
av = {
'function': name,
}
if args:
av['args'] = args
if kwargs:
av['kwargs'] = kwargs
self.protocol.append(av)
return self
def extend(self, protocol):
"""Add another pipeline to the end of the current pipeline.
:param protocol: An iterable of dictionaries (or another Pipeline)
:type protocol: iter[dict] or Pipeline
:return: This pipeline for fluid query building
:rtype: Pipeline
Example:
>>> p1 = Pipeline.from_functions(['enrich_protein_and_rna_origins'])
>>> p2 = Pipeline.from_functions(['remove_pathologies'])
>>> p1.extend(p2)
"""
for data in protocol:
name, args, kwargs = _get_protocol_tuple(data)
self.append(name, *args, **kwargs)
return self
def _run_helper(self, graph, protocol):
"""Help run the protocol.
:param pybel.BELGraph graph: A BEL graph
:param list[dict] protocol: The protocol to run, as JSON
:rtype: pybel.BELGraph
"""
result = graph
for entry in protocol:
meta_entry = entry.get('meta')
if meta_entry is None:
name, args, kwargs = _get_protocol_tuple(entry)
func = self._get_function(name)
result = func(result, *args, **kwargs)
else:
networks = (
self._run_helper(graph, subprotocol)
for subprotocol in entry['pipelines']
)
if meta_entry == META_UNION:
result = union(networks)
elif meta_entry == META_INTERSECTION:
result = node_intersection(networks)
else:
raise MetaValueError('invalid meta-command: {}'.format(meta_entry))
return result
def run(self, graph, universe=None):
"""Run the contained protocol on a seed graph.
:param pybel.BELGraph graph: The seed BEL graph
:param pybel.BELGraph universe: Allows just-in-time setting of the universe in case it wasn't set before.
Defaults to the given network.
:return: The result of applying the protocol to a copy of the seed graph
:rtype: pybel.BELGraph
"""
self.universe = universe or graph.copy()
return self._run_helper(graph.copy(), self.protocol)
def __call__(self, graph, universe=None):
"""Call :meth:`Pipeline.run`.
:param pybel.BELGraph graph: The seed BEL graph
:param pybel.BELGraph universe: Allows just-in-time setting of the universe in case it wasn't set before.
Defaults to the given network.
:return: The result of applying the protocol to a copy of the seed graph
:rtype: pybel.BELGraph
Using __call__ allows for methods to be chained together then applied
>>> from pybel.struct.mutation import remove_associations, remove_pathologies
>>> from pybel.struct.pipeline.pipeline import Pipeline
>>> from pybel import BELGraph
>>> pipe = Pipeline.from_functions([remove_associations, remove_pathologies])
>>> graph = BELGraph() ...
>>> new_graph = pipe(graph)
"""
return self.run(graph=graph, universe=universe)
def _wrap_universe(self, func):
"""Take a function that needs a universe graph as the first argument and returns a wrapped one."""
@wraps(func)
def wrapper(graph, *args, **kwargs):
"""Applies the enclosed function with the universe given as the first argument"""
if self.universe is None:
raise MissingUniverseError(
'Can not run universe function [{}] - No universe is set'.format(func.__name__))
return func(self.universe, graph, *args, **kwargs)
return wrapper
@staticmethod
def _wrap_in_place(func):
"""Take a function that doesn't return the graph and returns the graph."""
@wraps(func)
def wrapper(graph, *args, **kwargs):
"""Applies the enclosed function and returns the graph"""
func(graph, *args, **kwargs)
return graph
return wrapper
def to_json(self):
"""Return this pipeline as a JSON list.
:rtype: list
"""
return self.protocol
def dumps(self, **kwargs):
"""Dump this pipeline as a JSON string.
:rtype: str
"""
return json.dumps(self.to_json(), **kwargs)
def dump(self, file, **kwargs):
"""Dump this protocol to a file in JSON.
:param file: A file or file-like to pass to :func:`json.dump`
"""
return json.dump(self.to_json(), file, **kwargs)
@staticmethod
def load(file):
"""Load a protocol from JSON contained in file.
:param file: A file or file-like
:return: The pipeline represented by the JSON in the file
:rtype: Pipeline
:raises MissingPipelineFunctionError: If any functions are not registered
"""
return Pipeline(json.load(file))
@staticmethod
def loads(s):
"""Load a protocol from a JSON string.
:param str s: A JSON string
:return: The pipeline represented by the JSON in the file
:rtype: Pipeline
:raises MissingPipelineFunctionError: If any functions are not registered
"""
return Pipeline(json.loads(s))
def __str__(self):
return json.dumps(self.protocol, indent=2)
@staticmethod
def _build_meta(meta, pipelines):
"""
:param str meta: either union or intersection
:param iter[Pipeline] pipelines:
:rtype: Pipeline
"""
return Pipeline(protocol=[
{
'meta': meta,
'pipelines': [
pipeline.protocol
for pipeline in pipelines
]
},
])
@staticmethod
def union(pipelines):
"""Take the union of multiple pipelines.
:param iter[Pipeline] pipelines: A list of pipelines
:return: The union of the results from multiple pipelines
:rtype: Pipeline
"""
return Pipeline._build_meta(META_UNION, pipelines)
@staticmethod
def intersection(pipelines):
"""Take the intersection of the results from multiple pipelines.
:param iter[Pipeline] pipelines: A list of pipelines
:return: The intersection of results from multiple pipelines
:rtype: Pipeline
"""
return Pipeline._build_meta(META_INTERSECTION, pipelines)
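# --- Usage sketch (editor's addition) ----------------------------------------
# Building, serialising and combining pipelines with the API defined above.
# The mutation functions are the same ones used in the docstrings earlier in
# this module; running this requires a full pybel installation.
if __name__ == '__main__':
    from pybel import BELGraph
    from pybel.struct.mutation import remove_associations, remove_pathologies

    p1 = Pipeline.from_functions([remove_associations])
    p2 = Pipeline.from_functions([remove_pathologies])
    print(p1.dumps())                      # [{"function": "remove_associations"}]

    combined = Pipeline.union([p1, p2])    # meta-pipeline over both protocols
    result = combined.run(BELGraph())      # union of the two transformed copies
    print(result.number_of_nodes())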
| 32.655072
| 117
| 0.616723
|
6b8d3aa9d06b28e21bcc5efb962abeec7309e08c
| 877
|
py
|
Python
|
capricorn/__init__.py
|
WenchenLi/nlp_vocab
|
9fb557b2e70ab378395e9f9548a7a61d24fd1e5d
|
[
"MIT"
] | 5
|
2018-12-07T12:35:07.000Z
|
2020-07-04T04:33:13.000Z
|
capricorn/__init__.py
|
WenchenLi/nlp_vocab
|
9fb557b2e70ab378395e9f9548a7a61d24fd1e5d
|
[
"MIT"
] | null | null | null |
capricorn/__init__.py
|
WenchenLi/nlp_vocab
|
9fb557b2e70ab378395e9f9548a7a61d24fd1e5d
|
[
"MIT"
] | 1
|
2020-09-06T12:05:50.000Z
|
2020-09-06T12:05:50.000Z
|
# Copyright 2017 The Wenchen Li. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
init capricorn
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from capricorn.VocabularyProcessor import VocabularyProcessor
| 36.541667
| 80
| 0.712657
|
8d228c0c6f41b305fd04d21d56e9b97f3c152ab6
| 59,281
|
py
|
Python
|
controllers/inv.py
|
nursix/eden
|
61d5a947da20bbf4d6458c9be88ed37b1330518c
|
[
"MIT"
] | 4
|
2015-04-08T19:51:44.000Z
|
2016-08-06T07:05:35.000Z
|
controllers/inv.py
|
nursix/eden
|
61d5a947da20bbf4d6458c9be88ed37b1330518c
|
[
"MIT"
] | 27
|
2015-02-18T23:38:23.000Z
|
2020-04-27T13:53:23.000Z
|
controllers/inv.py
|
nursix/eden
|
61d5a947da20bbf4d6458c9be88ed37b1330518c
|
[
"MIT"
] | 5
|
2015-09-10T05:31:14.000Z
|
2017-06-07T11:06:27.000Z
|
# -*- coding: utf-8 -*-
"""
Inventory Management
A module to record Inventories of Items at Sites,
including Warehouses, Offices, Shelters & Hospitals
"""
if not settings.has_module(c):
raise HTTP(404, body="Module disabled: %s" % c)
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
return settings.customise_home(c, alt_function="index_alt")
# -----------------------------------------------------------------------------
def index_alt():
"""
Module homepage for non-Admin users when no CMS content found
"""
# Just redirect to the Warehouse Summary View
s3_redirect_default(URL(f="warehouse",
args = "summary",
))
# =============================================================================
def warehouse():
"""
RESTful CRUD controller
"""
# Defined in the model for forwards from org/site controller
from s3db.inv import inv_warehouse_controller
return inv_warehouse_controller()
# -----------------------------------------------------------------------------
def warehouse_type():
"""
RESTful CRUD controller
"""
return s3_rest_controller()
# =============================================================================
def inv_item():
""" REST Controller """
if settings.get_inv_direct_stock_edits():
# Limit site_id to sites the user has permissions for
auth.permitted_facilities(table = s3db.inv_inv_item,
error_msg = T("You do not have permission for any site to add an inventory item."))
# Import pre-process
def import_prep(data):
"""
Process option to Delete all Stock records of the Organisation/Branch
before processing a new data import
"""
if s3.import_replace:
resource, tree = data
if tree is not None:
xml = current.xml
tag = xml.TAG
att = xml.ATTRIBUTE
root = tree.getroot()
expr = "/%s/%s[@%s='org_organisation']/%s[@%s='name']" % \
(tag.root, tag.resource, att.name, tag.data, att.field)
orgs = root.xpath(expr)
otable = s3db.org_organisation
stable = s3db.org_site
itable = s3db.inv_inv_item
for org in orgs:
org_name = org.get("value", None) or org.text
if org_name:
try:
org_name = json.loads(xml.xml_decode(org_name))
except:
pass
if org_name:
query = (otable.name == org_name) & \
(stable.organisation_id == otable.id) & \
(itable.site_id == stable.id)
resource = s3db.resource("inv_inv_item",
filter = query,
)
# Use cascade = True so that the deletion gets
# rolled back if the import fails:
resource.delete(format = "xml",
cascade = True,
)
resource.skip_import = True
s3.import_prep = import_prep
def prep(r):
#if r.method != "report":
# s3.dataTable_group = 1
if r.component:
#component_name = r.component_name
if r.component_name == "adj_item":
s3db.configure("inv_adj_item",
deletable = False,
editable = False,
insertable = False,
)
# We can't update this dynamically
#elif component_name == "bin":
# s3db.inv_inv_item_bin.quantity.requires = IS_INT_IN_RANGE(0, r.record.quantity)
else:
tablename = "inv_inv_item"
s3.crud_strings[tablename].msg_list_empty = T("No Stock currently registered")
if r.method == "report":
# Quantity 0 can still be used for managing Stock Replenishment
s3.filter = (r.table.quantity != 0)
report = get_vars.get("report")
if report == "mon":
# Monetization Report
s3.crud_strings[tablename].update({"title_list": T("Monetization Report"),
"subtitle_list": T("Monetization Details"),
#"msg_list_empty": T("No Stock currently registered"),
})
s3db.configure(tablename,
list_fields = [(T("Donor"), "supply_org_id"),
(T("Items/Description"), "item_id"),
(T("Quantity"), "quantity"),
(T("Unit"), "item_pack_id"),
(T("Unit Value"), "pack_value"),
(T("Total Value"), "total_value"),
(T("Remarks"), "comments"),
"status",
]
)
if r.interactive and \
r.method in (None, "update", "summary") and \
settings.get_inv_direct_stock_edits():
# Limit to Bins from this site
# Validate Bin Quantities
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_item.js" % r.application)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_item.min.js" % r.application)
record = r.record
if record:
site_id = record.site_id
ibtable = s3db.inv_inv_item_bin
# We can't update this dynamically
#ibtable.quantity.requires = IS_INT_IN_RANGE(0, r.record.quantity)
sum_field = ibtable.quantity.sum()
binned = db(ibtable.inv_item_id == r.id).select(sum_field,
limitby = (0, 1),
orderby = sum_field,
).first()[sum_field]
if binned:
# This is in the current Pack units
binned = '''
S3.supply.binnedQuantity=%s''' % binned
else:
binned = ""
# Need to transmit the current item_pack_id as not included in the IS_ONE_OF_EMPTY_SELECT widget
# Also send the current pack details to avoid an AJAX call
item_id = record.item_id
ptable = s3db.supply_item_pack
rows = db(ptable.item_id == item_id).select(ptable.id,
ptable.name,
ptable.quantity,
)
# Simplify format
packs = {item_id: [{"i": row.id,
"n": row.name,
"q": row.quantity,
} for row in rows],
}
SEPARATORS = (",", ":")
packs = json.dumps(packs, separators=SEPARATORS)
s3.js_global.append('''S3.supply.packs=%s
S3.supply.itemPackID=%s%s''' % (packs,
record.item_pack_id,
binned,
))
f = ibtable.layout_id
f.widget.filter = (s3db.org_site_layout.site_id == site_id)
f.comment.args = [site_id, "layout", "create"]
# We can't update this dynamically
#f.requires.other.set_filter(filterby = "site_id",
# filter_opts = [site_id],
# )
return True
s3.prep = prep
def postp(r, output):
if r.interactive and \
r.component_name == "adj_item":
# Add Button for New Adjustment
_href = URL(c="inv", f="adj",
vars = {"item": r.id,
"site": r.record.site_id,
},
)
from s3 import S3CRUD
add_btn = S3CRUD.crud_button(label = T("New Adjustment"),
_href = _href,
_id = "add-btn",
)
if settings.ui.formstyle == "bootstrap":
add_btn.add_class("btn btn-primary")
else:
add_btn.add_class("action-btn")
output["buttons"] = {"add_btn": add_btn,
}
return output
s3.postp = postp
from s3db.inv import inv_rheader
return s3_rest_controller(#csv_extra_fields = [{"label": "Organisation",
# "field": s3db.org_organisation_id(comment = None)
# },
# ],
pdf_orientation = "Landscape",
pdf_table_autogrow = "B",
pdf_groupby = "site_id, item_id",
pdf_orderby = "expiry_date, supply_org_id",
replace_option = T("Remove existing data before import"),
rheader = inv_rheader,
)
# -----------------------------------------------------------------------------
def inv_item_bin():
"""
RESTful CRUD controller
- just used for options.s3json lookups
"""
s3.prep = lambda r: \
r.representation == "s3json" and r.method == "options"
return s3_rest_controller()
# =============================================================================
def adj():
"""
RESTful CRUD controller for Stock Adjustments
"""
table = s3db.inv_adj
# Limit site_id to sites the user has permissions for
error_msg = T("You do not have permission to adjust the stock level in this warehouse.")
auth.permitted_facilities(table = table,
error_msg = error_msg)
from s3db.inv import inv_adj_close
s3db.set_method("inv", "adj",
method = "close",
action = inv_adj_close,
)
def prep(r):
if r.interactive:
if r.component:
if r.component_name == "adj_item":
adj_status = r.record.status
if adj_status:
s3db.configure("inv_adj_item",
editable = False,
insertable = False,
)
else:
# Limit to Bins from this site
from s3db.org import org_site_layout_config
org_site_layout_config(r.record.site_id, s3db.inv_adj_item_bin.layout_id)
# Validate Bin Quantities
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_adj_item.js" % r.application)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_adj_item.min.js" % r.application)
if r.component_id:
aitable = s3db.inv_adj_item
if adj_status == 0:
aitable.reason.writable = True
record = db(aitable.id == r.component_id).select(aitable.inv_item_id,
aitable.old_quantity,
limitby = (0, 1),
).first()
if record.inv_item_id:
aitable.item_id.writable = False
aitable.item_id.comment = None
aitable.item_pack_id.writable = False
abtable = s3db.inv_adj_item_bin
sum_field = abtable.quantity.sum()
binned = db(abtable.adj_item_id == r.component_id).select(sum_field,
limitby = (0, 1),
orderby = sum_field,
).first()[sum_field]
if binned:
s3.js_global.append('''S3.supply.binnedQuantity=%s
S3.supply.oldQuantity=%s''' % (binned, record.old_quantity))
elif r.component_name == "image":
doc_table = s3db.doc_image
doc_table.organisation_id.readable = doc_table.organisation_id.writable = False
doc_table.person_id.readable = doc_table.person_id.writable = False
doc_table.location_id.readable = doc_table.location_id.writable = False
else:
if r.record:
if r.record.status:
# Don't allow modifying completed adjustments
#table.adjuster_id.writable = False
#table.comments.writable = False
s3db.configure("inv_adj",
deletable = False,
editable = False,
)
else:
# Don't allow switching Site after Adjustment created as the Items in the Adjustment match the original Site
table.site_id.writable = False
else:
if "item" in get_vars and "site" in get_vars:
# Create a adj record with a single adj_item record
# e.g. coming from New Adjustment button on inv/inv_item/x/adj_item tab
# e.g. coming from Adjust Stock Item button on inv/site/inv_item/x tab
# @ToDo: This should really be a POST, not a GET
inv_item_id = get_vars.item
inv_item_table = s3db.inv_inv_item
inv_item = db(inv_item_table.id == inv_item_id).select(inv_item_table.id,
inv_item_table.item_id,
inv_item_table.item_pack_id,
inv_item_table.quantity,
inv_item_table.currency,
inv_item_table.status,
inv_item_table.pack_value,
inv_item_table.expiry_date,
inv_item_table.owner_org_id,
limitby = (0, 1),
).first()
inv_bin_table = s3db.inv_inv_item_bin
bins = db(inv_bin_table.inv_item_id == inv_item_id).select(inv_bin_table.layout_id,
inv_bin_table.quantity,
)
item_id = inv_item.item_id
adj_id = table.insert(adjuster_id = auth.s3_logged_in_person(),
site_id = get_vars.site,
adjustment_date = request.utcnow,
status = 0,
category = 1,
comments = "Adjust %s" % inv_item_table.item_id.represent(item_id, show_link=False),
)
adj_bin_table = s3db.inv_adj_item_bin
adj_item_table = s3db.inv_adj_item
adj_item_id = adj_item_table.insert(reason = 0, # Unknown
adj_id = adj_id,
inv_item_id = inv_item.id, # original source inv_item
item_id = item_id, # the supply item
item_pack_id = inv_item.item_pack_id,
old_quantity = inv_item.quantity,
currency = inv_item.currency,
old_status = inv_item.status,
new_status = inv_item.status,
old_pack_value = inv_item.pack_value,
new_pack_value = inv_item.pack_value,
expiry_date = inv_item.expiry_date,
old_owner_org_id = inv_item.owner_org_id,
new_owner_org_id = inv_item.owner_org_id,
)
for row in bins:
adj_bin_table.insert(adj_item_id = adj_item_id,
layout_id = row.layout_id,
quantity = row.quantity,
)
redirect(URL(c = "inv",
f = "adj",
args = [adj_id,
"adj_item",
adj_item_id,
"update",
]
))
else:
table.comments.default = "Complete Stock Adjustment"
if "site" in get_vars:
table.site_id.writable = True
table.site_id.default = get_vars.site
return True
s3.prep = prep
def postp(r, output):
if r.interactive:
s3_action_buttons(r, deletable=False)
return output
s3.postp = postp
from s3db.inv import inv_adj_rheader
return s3_rest_controller(rheader = inv_adj_rheader)
# -----------------------------------------------------------------------------
def adj_item():
"""
RESTful CRUD controller for Adjustment Items
- just used for options.s3json lookups
"""
s3.prep = lambda r: \
r.representation == "s3json" and r.method == "options"
return s3_rest_controller()
# -----------------------------------------------------------------------------
def adj_item_bin():
"""
RESTful CRUD controller for Adjustment Item Bins
- just used for options.s3json lookups
"""
s3.prep = lambda r: \
r.representation == "s3json" and r.method == "options"
return s3_rest_controller()
# =============================================================================
def kitting():
"""
RESTful CRUD controller for Kitting
"""
from s3db.inv import inv_rheader
return s3_rest_controller(rheader = inv_rheader)
# =============================================================================
def recv():
""" RESTful CRUD controller """
from s3db.inv import inv_recv_controller
return inv_recv_controller()
# -----------------------------------------------------------------------------
def recv_item_bin():
"""
RESTful CRUD controller
- just used for options.s3json lookups
"""
s3.prep = lambda r: \
r.representation == "s3json" and r.method == "options"
return s3_rest_controller()
# -----------------------------------------------------------------------------
def send():
""" RESTful CRUD controller """
from s3db.inv import inv_send_controller
return inv_send_controller()
# -----------------------------------------------------------------------------
def send_item_bin():
"""
RESTful CRUD controller
- just used for options.s3json lookups
"""
s3.prep = lambda r: \
r.representation == "s3json" and r.method == "options"
return s3_rest_controller()
# -----------------------------------------------------------------------------
def track_item():
""" RESTful CRUD controller """
table = s3db.inv_track_item
# Only used for Read-only Reports
s3db.configure("inv_track_item",
deletable = False,
editable = False,
insertable = False,
)
viewing = get_vars.get("viewing")
if viewing:
# Track Shipment
dummy, item_id = viewing.split(".")
if item_id != "None":
s3.filter = (table.send_inv_item_id == item_id ) | \
(table.recv_inv_item_id == item_id)
list_fields = None # Configure later (DRY)
else:
report = get_vars.get("report")
if report == "rel":
# Summary of Releases
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Summary of Releases"),
subtitle_list = T("Summary Details"),
)
s3.filter = (FS("send_id") != None)
list_fields = [#"send_id",
#"req_item_id",
(T("Date Released"), "send_id$date"),
(T("Beneficiary"), "send_id$site_id"),
(settings.get_inv_send_shortname(), "send_id$send_ref"),
(T("Items/Description"), "item_id"),
(T("Source"), "supply_org_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
]
if settings.get_inv_send_req():
list_fields.insert(3, (settings.get_inv_req_shortname(), "send_id$req.req_ref"))
elif settings.get_inv_send_req_ref():
list_fields.insert(3, (settings.get_inv_req_shortname(), "send_id$req_ref"))
s3db.configure("inv_track_item",
orderby = "inv_send.site_id",
sort = True
)
elif report == "inc":
# Summary of Incoming Supplies
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Summary of Incoming Supplies"),
subtitle_list = T("Summary Details"),
)
s3.filter = (FS("recv_id") != None)
list_fields = [(T("Date Received"), "recv_id$date"),
(T("Received By"), "recv_id$recipient_id"),
(settings.get_inv_send_shortname(), "recv_id$send_ref"),
(settings.get_inv_recv_shortname(), "recv_id$recv_ref"),
(settings.get_proc_shortname(), "recv_id$purchase_ref"),
(T("Item/Description"), "item_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
(T("Source"), "supply_org_id"),
(T("Remarks"), "comments"),
]
s3db.configure("inv_track_item",
orderby = "inv_recv.recipient_id",
)
elif report == "util":
# Utilization Report
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Utilization Report"),
subtitle_list = T("Utilization Details"),
)
s3.filter = (FS("item_id") != None)
list_fields = [(T("Item/Description"), "item_id$name"),
(T("Beneficiary"), "send_id$site_id"),
(settings.get_inv_send_shortname(), "send_id$send_ref"),
(T("Items/Description"), "item_id"),
(T("Source"), "supply_org_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
]
if settings.get_inv_send_req():
list_fields.insert(3, (settings.get_inv_req_shortname(), "send_id$req.req_ref"))
elif settings.get_inv_send_req_ref():
list_fields.insert(3, (settings.get_inv_req_shortname(), "send_id$req_ref"))
elif report == "exp":
# Expiration Report
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Expiration Report"),
subtitle_list = T("Expiration Details"),
)
s3.filter = (FS("expiry_date") != None)
list_fields = ["recv_inv_item_id$site_id",
(T("Item/Description"), "item_id"),
(T("Expiration Date"), "expiry_date"),
(T("Source"), "supply_org_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
]
else:
list_fields = None # Configure later (DRY)
if not list_fields:
list_fields = ["status",
"item_source_no",
"item_id",
"item_pack_id",
"send_id",
"recv_id",
"quantity",
(T("Total Weight (kg)"), "total_weight"),
(T("Total Volume (m3)"), "total_volume"),
"bin.layout_id",
"return_quantity",
"recv_quantity",
"recv_bin.layout_id",
"owner_org_id",
"supply_org_id",
]
if settings.get_inv_track_pack_values():
list_fields.insert(10, "pack_value")
list_fields.insert(10, "currency")
s3db.configure("inv_track_item",
list_fields = list_fields,
)
from s3db.inv import inv_rheader
return s3_rest_controller(rheader = inv_rheader)
# =============================================================================
def req():
"""
REST Controller for Inventory Requisitions
"""
# Don't show Templates
from s3 import FS
s3.filter = (FS("is_template") == False)
# Hide completed Requisitions by default
from s3 import s3_set_default_filter
if settings.get_inv_req_workflow():
# 1: Draft
# 2: Submitted
# 3: Approved
s3_set_default_filter("~.workflow_status",
[1, 2, 3],
tablename = "inv_req")
else:
# REQ_STATUS_NONE = 0
# REQ_STATUS_PARTIAL = 1
s3_set_default_filter("~.fulfil_status",
[0, 1],
tablename = "inv_req")
from s3db.inv import inv_req_controller
return inv_req_controller()
# -----------------------------------------------------------------------------
def req_template():
"""
REST Controller for Inventory Requisition Templates
"""
# Hide fields which aren't relevant to templates
table = s3db.inv_req
field = table.is_template
field.default = True
field.readable = field.writable = False
s3.filter = (field == True)
settings.inv.req_prompt_match = False
if "req_item" in request.args:
# List fields for req_item
table = s3db.inv_req_item
list_fields = ["item_id",
"item_pack_id",
"quantity",
"comments",
]
s3db.configure("inv_req_item",
list_fields = list_fields,
)
else:
# Main Req
fields = ["req_ref",
"date",
"date_required",
"date_required_until",
"date_recv",
"recv_by_id",
"cancel",
"commit_status",
"transit_status",
"fulfil_status",
]
for fieldname in fields:
field = table[fieldname]
field.readable = field.writable = False
table.purpose.label = T("Details")
list_fields = ["site_id"
"priority",
"purpose",
"comments",
]
s3db.configure("inv_req",
list_fields = list_fields,
)
# CRUD strings
s3.crud_strings["inv_req"] = Storage(
label_create = T("Create Request Template"),
title_display = T("Request Template Details"),
title_list = T("Request Templates"),
title_update = T("Edit Request Template"),
label_list_button = T("List Request Templates"),
label_delete_button = T("Delete Request Template"),
msg_record_created = T("Request Template Added"),
msg_record_modified = T("Request Template Updated"),
msg_record_deleted = T("Request Template Deleted"),
msg_list_empty = T("No Request Templates"),
)
from s3db.inv import inv_req_controller
return inv_req_controller(template = True)
# =============================================================================
def req_item():
"""
RESTful CRUD controller for Request Items
@ToDo: Filter out fulfilled Items?
"""
# Filter out Template Items
#if request.function != "fema":
s3.filter = (FS("req_id$is_template") == False)
# Custom Methods
from s3db.inv import inv_req_item_inv_item, inv_req_item_order
set_method = s3db.set_method
set_method("inv", "req_item",
method = "inv_item",
action = inv_req_item_inv_item
)
set_method("inv", "req_item",
method = "order",
action = inv_req_item_order
)
def prep(r):
if r.interactive or r.representation == "aadata":
list_fields = s3db.get_config("inv_req_item", "list_fields")
list_fields.insert(1, "req_id$site_id")
levels = gis.get_relevant_hierarchy_levels()
levels.reverse()
for level in levels:
lfield = "req_id$site_id$location_id$%s" % level
list_fields.insert(1, lfield)
s3db.configure("inv_req_item",
insertable = False,
list_fields = list_fields,
)
s3.crud_strings["inv_req_item"].title_list = T("Requested Items")
if r.method != None and r.method != "update" and r.method != "read":
# Hide fields which don't make sense in a Create form
# - includes one embedded in list_create
# - list_fields over-rides, so still visible within list itself
if not settings.get_inv_req_item_quantities_writable():
table = r.table
table.quantity_commit.readable = \
table.quantity_commit.writable = False
table.quantity_transit.readable = \
table.quantity_transit.writable = False
table.quantity_fulfil.readable = \
table.quantity_fulfil.writable = False
return True
s3.prep = prep
def postp(r, output):
if r.interactive and \
not r.component and \
r.method != "import":
if settings.get_inv_req_prompt_match():
s3_action_buttons(r, deletable=False)
req_item_inv_item_btn = {"label": s3_str(T("Request from Facility")),
"url": URL(c = "inv",
f = "req_item",
args = ["[id]", "inv_item"],
),
"_class": "action-btn",
}
s3.actions.append(req_item_inv_item_btn)
return output
s3.postp = postp
return s3_rest_controller("inv", "req_item")
# =============================================================================
def commit():
"""
RESTful CRUD controller for Commits
"""
from s3db.inv import inv_commit_send
s3db.set_method("inv", "commit",
method = "send",
action = inv_commit_send,
)
def prep(r):
if r.interactive and r.record:
# Commitments created through UI should be done via components
if r.component:
if r.component_name == "commit_item":
# Dropdown not Autocomplete
s3db.inv_commit_item.req_item_id.widget = None
# Limit commit items to items from the request
s3db.inv_commit_item.req_item_id.requires = \
IS_ONE_OF(db, "inv_req_item.id",
s3db.inv_req_item_represent,
filterby = "req_id",
filter_opts = [r.record.req_id],
orderby = "inv_req_item.id",
sort = True,
)
else:
# No Component
table = r.table
s3.crud.submit_button = T("Save Changes")
# Limit site_id to facilities the user has permissions for
auth.permitted_facilities(table = table,
error_msg = T("You do not have permission for any facility to make a commitment.") )
table.site_id.comment = A(T("Set as default Site"),
_id = "inv_commit_site_id_link",
_target = "_blank",
_href = URL(c = "default",
f = "user",
args = ["profile"]
))
jappend = s3.jquery_ready.append
jappend('''
$('#inv_commit_site_id_link').click(function(){
var site_id=$('#inv_commit_site_id').val()
if(site_id){
var url = $('#inv_commit_site_id_link').attr('href')
var exists=url.indexOf('?')
if(exists=='-1'){
$('#inv_commit_site_id_link').attr('href',url+'?site_id='+site_id)
}
}
return true
})''')
# Dropdown not Autocomplete
s3db.inv_commit_item.req_item_id.widget = None
# Options updater for inline items
jappend('''
$.filterOptionsS3({
'trigger':{'alias':'commit_item','name':'req_item_id'},
'target':{'alias':'commit_item','name':'item_pack_id'},
'scope':'row',
'lookupPrefix':'req',
'lookupResource':'req_item_packs',
'lookupKey':'req_item_id',
'lookupField':'id',
'msgNoRecords':i18n.no_packs,
'fncPrep':S3.supply.fncPrepItem,
'fncRepresent':S3.supply.fncRepresentItem
})''')
# Custom Form
from s3 import S3SQLCustomForm, S3SQLInlineComponent
crud_form = S3SQLCustomForm("site_id",
"date",
"date_available",
"committer_id",
S3SQLInlineComponent(
"commit_item",
label = T("Items"),
fields = ["req_item_id",
"item_pack_id",
"quantity",
"comments",
]
),
"comments",
)
s3db.configure("inv_commit",
crud_form = crud_form,
)
return True
s3.prep = prep
def postp(r, output):
if r.interactive and r.method != "import":
if not r.component:
# Items
s3_action_buttons(r)
s3.actions.append({"label": s3_str(T("Prepare Shipment")),
"url": URL(f = "commit",
args = ["[id]", "send"],
),
"_class": "action-btn send-btn dispatch",
})
# Convert to POST
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_commit.js" % appname)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_commit.min.js" % appname)
return output
s3.postp = postp
return s3_rest_controller(rheader = commit_rheader)
# -----------------------------------------------------------------------------
def commit_rheader(r):
""" Resource Header for Commitments """
if r.representation == "html":
record = r.record
if record and r.name == "commit":
from s3 import S3DateTime
s3_date_represent = S3DateTime.date_represent
tabs = [(T("Edit Details"), None),
(T("Items"), "commit_item"),
]
table = r.table
#req_record = db.inv_req[record.req_id]
#req_date = req_record.date
rheader = DIV(TABLE(TR(TH("%s: " % table.req_id.label),
table.req_id.represent(record.req_id),
),
TR(TH("%s: " % T("Committing Warehouse")),
s3db.org_site_represent(record.site_id),
TH("%s: " % T("Commit Date")),
s3_date_represent(record.date),
),
TR(TH("%s: " % table.comments.label),
TD(record.comments or "", _colspan=3)
),
),
)
prepare_btn = A(T("Prepare Shipment"),
_href = URL(f = "commit",
args = [record.id,
"send",
]
),
_id = "commit-send",
_class = "action-btn"
)
# Convert to POST
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_req_rheader.js" % appname)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_req_rheader.min.js" % appname)
s3.rfooter = TAG[""](prepare_btn)
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader.append(rheader_tabs)
return rheader
return None
# -----------------------------------------------------------------------------
def commit_item():
"""
RESTful CRUD controller for Commit Items
"""
return s3_rest_controller()
# =============================================================================
def stock_card():
"""
RESTful CRUD controller for Stock Cards
"""
viewing = get_vars.get("viewing")
if viewing:
get_vars.pop("viewing")
inv_item_id = viewing.split("inv_inv_item.")[1]
table = s3db.inv_inv_item
inv_item = db(table.id == inv_item_id).select(table.site_id,
table.item_id,
table.item_source_no,
table.expiry_date,
limitby = (0, 1),
).first()
if inv_item:
item_source_no = inv_item.item_source_no
table = s3db.inv_stock_card
query = (table.site_id == inv_item.site_id) & \
(table.item_id == inv_item.item_id) & \
(table.item_source_no == item_source_no) & \
(table.expiry_date == inv_item.expiry_date)
exists = db(query).select(table.id,
limitby = (0, 1),
).first()
if exists:
request.args = [str(exists.id), "log"]
def postp(r, output):
if r.id:
# Don't render any Action Buttons
s3.actions = []
else:
url = URL(args = ["[id]", "log"])
s3_action_buttons(r,
deletable = False,
read_url = url,
update_url = url,
)
return output
s3.postp = postp
from s3db.inv import inv_rheader
return s3_rest_controller(rheader = inv_rheader)
# =============================================================================
def minimum():
"""
RESTful CRUD Controller for Stock Minimums
"""
return s3_rest_controller()
# =============================================================================
def order_item():
"""
RESTful CRUD Controller for Order Items
"""
return s3_rest_controller()
# =============================================================================
def package():
"""
RESTful CRUD Controller for Packages (Boxes & Pallets)
"""
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_package.js" % appname)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.inv_package.min.js" % appname)
return s3_rest_controller()
# =============================================================================
def req_approver():
"""
RESTful CRUD Controller for Requisition Approvers
"""
# We need a more complex control: leave to template
#if not auth.s3_has_role("ADMIN"):
# s3.filter = auth.filter_by_root_org(s3db.req_approver)
return s3_rest_controller()
# =============================================================================
def facility():
# Open record in this controller after creation
s3db.configure("org_facility",
create_next = URL(c="inv", f="facility",
args = ["[id]", "read"]),
)
from s3db.org import org_facility_controller
return org_facility_controller()
# -----------------------------------------------------------------------------
def facility_type():
return s3_rest_controller("org")
# =============================================================================
def project():
"""
Simpler version of Projects for use within Inventory module
"""
# Load default Model
s3db.project_project
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm("organisation_id",
"code",
"name",
"end_date",
)
list_fields = ["organisation_id",
"code",
"name",
"end_date",
]
s3db.configure("project_project",
crud_form = crud_form,
filter_widgets = None,
list_fields = list_fields,
)
return s3_rest_controller("project")
# -----------------------------------------------------------------------------
def project_req():
"""
RESTful CRUD controller
- just used for options.s3json lookups
"""
s3.prep = lambda r: \
r.representation == "s3json" and r.method == "options"
return s3_rest_controller()
# -----------------------------------------------------------------------------
def donor():
"""
Filtered version of the organisation() REST controller
"""
# @ToDo: This should be a deployment setting
get_vars["organisation_type.name"] = \
"Academic,Bilateral,Government,Intergovernmental,NGO,UN agency"
# Load model (including normal CRUD strings)
table = s3db.org_organisation
# Modify CRUD Strings
s3.crud_strings.org_organisation = Storage(
label_create = T("Create Donor"),
title_display = T("Donor Details"),
title_list = T("Donors"),
title_update = T("Edit Donor"),
title_upload = T("Import Donors"),
label_list_button = T("List Donors"),
label_delete_button = T("Delete Donor"),
msg_record_created = T("Donor added"),
msg_record_modified = T("Donor updated"),
msg_record_deleted = T("Donor deleted"),
msg_list_empty = T("No Donors currently registered")
)
# Open record in this controller after creation
s3db.configure("org_organisation",
create_next = URL(c="inv", f="donor",
args = ["[id]", "read"]),
)
# NB Type gets defaulted in the Custom CRUD form
# - user needs create permissions for org_organisation_organisation_type
from s3db.org import org_organisation_controller
return org_organisation_controller()
# -----------------------------------------------------------------------------
def supplier():
"""
Filtered version of the organisation() REST controller
"""
get_vars["organisation_type.name"] = "Supplier"
# Load model (including normal CRUD strings)
table = s3db.org_organisation
# Modify CRUD Strings
s3.crud_strings.org_organisation = Storage(
label_create = T("Create Supplier"),
title_display = T("Supplier Details"),
title_list = T("Suppliers"),
title_update = T("Edit Supplier"),
title_upload = T("Import Suppliers"),
label_list_button = T("List Suppliers"),
label_delete_button = T("Delete Supplier"),
msg_record_created = T("Supplier added"),
msg_record_modified = T("Supplier updated"),
msg_record_deleted = T("Supplier deleted"),
msg_list_empty = T("No Suppliers currently registered")
)
# Open record in this controller after creation
s3db.configure("org_organisation",
create_next = URL(c="inv", f="supplier",
args = ["[id]", "read"]),
)
# NB Type gets defaulted in the Custom CRUD form
# - user needs create permissions for org_organisation_organisation_type
from s3db.org import org_organisation_controller
return org_organisation_controller()
# -----------------------------------------------------------------------------
def req_match():
"""
Match Requests
- a Tab for Sites to show what Requests they could potentially match
"""
from s3db.inv import inv_req_match
return inv_req_match()
# -----------------------------------------------------------------------------
def incoming():
"""
Incoming Shipments for Sites
Would be used from inv_req_rheader when looking at Transport Status
"""
# NB This function doesn't currently exist!
from s3db.inv import inv_incoming
return inv_incoming()
# =============================================================================
def inv_item_packs():
"""
Called by filterOptionsS3 to provide the pack options for a
particular Item
Access via the .json representation to avoid work rendering menus, etc
"""
try:
inv_item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
table = s3db.inv_inv_item
ptable = db.supply_item_pack
query = (table.id == inv_item_id) & \
(table.item_id == ptable.item_id)
packs = db(query).select(ptable.id,
ptable.name,
ptable.quantity,
)
SEPARATORS = (",", ":")
output = json.dumps(packs.as_list(), separators=SEPARATORS)
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def req_item_packs():
"""
Called by S3OptionsFilter to provide the pack options for a Requisition Item
Access via the .json representation to avoid work rendering menus, etc
"""
req_item_id = None
args = request.args
if len(args) == 1 and args[0].isdigit():
req_item_id = args[0]
else:
for v in request.vars:
if "." in v and v.split(".", 1)[1] == "req_item_id":
req_item_id = request.vars[v]
break
table = s3db.supply_item_pack
ritable = s3db.inv_req_item
query = (ritable.id == req_item_id) & \
(ritable.item_id == table.item_id)
rows = db(query).select(table.id,
table.name,
table.quantity,
)
SEPARATORS = (",", ":")
output = json.dumps(rows.as_list(), separators=SEPARATORS)
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def inv_item_quantity():
"""
Called from s3.inv_send_item.js to
- provide the pack options for a particular item
- lookup all Packs & Pack Quantities (to replace the filterOptionsS3 AJAX call to inv_item_packs)
Access via the .json representation to avoid work rendering menus, etc
"""
try:
inv_item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
table = s3db.inv_inv_item
ptable = db.supply_item_pack
inv_query = (table.id == inv_item_id)
query = inv_query & \
(table.item_pack_id == ptable.id)
inv_item = db(query).select(table.quantity,
ptable.quantity,
limitby = (0, 1),
).first()
query = inv_query & \
(table.item_id == ptable.item_id)
packs = db(query).select(ptable.id,
ptable.name,
ptable.quantity,
)
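    # total stock quantity in base item units: inventory quantity multiplied by items per pack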
data = {"quantity": inv_item["inv_inv_item.quantity"] * inv_item["supply_item_pack.quantity"],
"packs": packs.as_list(),
}
SEPARATORS = (",", ":")
output = json.dumps(data, separators=SEPARATORS)
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def commit_item_json():
"""
Used by s3.supply.js for the ajax_more quantity represent of req_items
Access via the .json representation to avoid work rendering menus, etc
"""
try:
req_item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
stable = s3db.org_site
ctable = s3db.inv_commit
itable = s3db.inv_commit_item
query = (itable.req_item_id == req_item_id) & \
(ctable.id == itable.commit_id) & \
(ctable.site_id == stable.id)
records = db(query).select(ctable.id,
ctable.date,
stable.name,
itable.quantity,
orderby = db.inv_commit.date,
)
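    # the first entry serves as a header row for the quantity list rendered by s3.supply.js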
output = [{"id": s3_str(T("Committed")),
"quantity": "#",
}]
for row in records:
quantity = row["inv_commit_item.quantity"]
name = row["org_site.name"]
row = row["inv_commit"]
output.append({"id": row.id,
"date": row.date.date().isoformat(),
"quantity": quantity,
"name": name,
})
SEPARATORS = (",", ":")
output = json.dumps(output, separators=SEPARATORS)
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def recv_item_json():
"""
Used by s3.supply.js for the ajax_more quantity represent of req_items
Access via the .json representation to avoid work rendering menus, etc
"""
try:
req_item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
from s3db.inv import SHIP_STATUS_RECEIVED
rtable = s3db.inv_recv
ittable = s3db.inv_track_item
query = (ittable.req_item_id == req_item_id) & \
(rtable.id == ittable.recv_id) & \
(rtable.status == SHIP_STATUS_RECEIVED)
rows = db(query).select(rtable.id,
rtable.date,
rtable.recv_ref,
ittable.quantity,
)
output = [{"id": s3_str(T("Received")),
"quantity": "#",
}]
for row in rows:
quantity = row["inv_track_item.quantity"]
row = row["inv_recv"]
output.append({"id": row.id,
"date": row.date.date().isoformat(),
"quantity": quantity,
"name": row.recv_ref,
})
SEPARATORS = (",", ":")
output = json.dumps(output, separators=SEPARATORS)
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def send_item_json():
"""
Used by s3.supply.js for the ajax_more quantity represent of req_items
Access via the .json representation to avoid work rendering menus, etc
"""
try:
req_item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
from s3db.inv import SHIP_STATUS_SENT, SHIP_STATUS_RECEIVED
istable = s3db.inv_send
ittable = s3db.inv_track_item
query = (ittable.req_item_id == req_item_id) & \
(istable.id == ittable.send_id) & \
((istable.status == SHIP_STATUS_SENT) | \
(istable.status == SHIP_STATUS_RECEIVED))
rows = db(query).select(istable.id,
istable.send_ref,
istable.date,
ittable.quantity,
)
output = [{"id": s3_str(T("Sent")),
"quantity": "#",
}]
for row in rows:
quantity = row["inv_track_item.quantity"]
row = row["inv_send"]
output.append({"id": row.id,
"date": row.date.date().isoformat(),
"quantity": quantity,
"name": row.send_ref,
})
SEPARATORS = (",", ":")
output = json.dumps(output, separators=SEPARATORS)
response.headers["Content-Type"] = "application/json"
return output
# END =========================================================================
| 40.108931
| 132
| 0.434439
|
cd853c12e1f360ffb59f5bd5476fe60ace9d24d7
| 1,588
|
py
|
Python
|
src/modules/connection.py
|
YHCClin/simple-effective-text-matching-pytorch
|
f6f2b15ad0a5270a69b3c961d99dd8645771bc99
|
[
"Apache-2.0"
] | 4
|
2021-08-30T03:08:26.000Z
|
2022-01-09T08:19:48.000Z
|
src/modules/connection.py
|
YHCClin/simple-effective-text-matching-pytorch
|
f6f2b15ad0a5270a69b3c961d99dd8645771bc99
|
[
"Apache-2.0"
] | 1
|
2021-10-05T12:45:38.000Z
|
2021-10-05T12:45:38.000Z
|
src/modules/connection.py
|
YHCClin/simple-effective-text-matching-pytorch
|
f6f2b15ad0a5270a69b3c961d99dd8645771bc99
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright (C) 2019 Alibaba Group Holding Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import torch
import torch.nn as nn
from . import Linear
from functools import partial
from src.utils.registry import register
registry = {}
register = partial(register, registry=registry)
@register('none')
class NullConnection(nn.Module):
def forward(self, x, _, __):
return x
@register('residual')
class Residual(nn.Module):
def __init__(self, args):
super().__init__()
self.linear = Linear(args.embedding_dim, args.hidden_size)
def forward(self, x, res, i):
if i == 1:
res = self.linear(res)
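        # scale the sum by sqrt(0.5) so its magnitude stays comparable to a single branch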
return (x + res) * math.sqrt(0.5)
@register('aug')
class AugmentedResidual(nn.Module):
def forward(self, x, res, i):
if i == 1:
return torch.cat([x, res], dim=-1) # res is embedding
hidden_size = x.size(-1)
x = (res[:, :, :hidden_size] + x) * math.sqrt(0.5)
return torch.cat([x, res[:, :, hidden_size:]], dim=-1) # latter half of res is embedding
| 29.962264
| 97
| 0.673174
|
e9b20476e70ff151687cddc5ad4e87c3dfe5a65d
| 257
|
py
|
Python
|
src/market/roles.py
|
wiktorcie/docker-django-example
|
87c5cc2ac45f66564b30538d43c5a8eadba42011
|
[
"MIT"
] | null | null | null |
src/market/roles.py
|
wiktorcie/docker-django-example
|
87c5cc2ac45f66564b30538d43c5a8eadba42011
|
[
"MIT"
] | null | null | null |
src/market/roles.py
|
wiktorcie/docker-django-example
|
87c5cc2ac45f66564b30538d43c5a8eadba42011
|
[
"MIT"
] | null | null | null |
from rolepermissions.roles import AbstractUserRole
class Vendor(AbstractUserRole):
available_permissions = {
'create_listing': True,
}
class Moderator(AbstractUserRole):
available_permissions = {
'do_something': True,
}
| 17.133333
| 50
| 0.696498
|
bacccc8fa8e270a1330a87aecc20885a658d12c9
| 874
|
py
|
Python
|
domintell/example/scan.py
|
yaccri/python-domintell
|
e8a17c9f25ef071a58dd0656746bde9105ba5f01
|
[
"MIT"
] | 1
|
2021-12-03T04:29:21.000Z
|
2021-12-03T04:29:21.000Z
|
domintell/example/scan.py
|
yaccri/python-domintell
|
e8a17c9f25ef071a58dd0656746bde9105ba5f01
|
[
"MIT"
] | 3
|
2020-09-20T11:50:28.000Z
|
2021-08-13T10:16:14.000Z
|
domintell/example/scan.py
|
yaccri/python-domintell
|
e8a17c9f25ef071a58dd0656746bde9105ba5f01
|
[
"MIT"
] | 6
|
2020-10-05T20:23:06.000Z
|
2021-09-14T07:18:31.000Z
|
#!/usr/bin/python3
"""
Example code to scan Domintell and return list of installed modules.
"""
import time
import logging
import sys
import domintell
import os, sys
from config import host
def _on_message(message):
print('received message', message)
print(message)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
"""
please create a simple config.py with:
host = {
'ADDRESS': '192.168.0.1:17481',
'SECRET': '<your password hash>'
}
"""
#pylint: disable-msg=C0103
logging.info('Configuring controller for {}'.format(host['ADDRESS']))
controller = domintell.Controller(host['ADDRESS'])
controller.subscribe(_on_message)
logging.info('LOGIN')
controller.login(host['SECRET'])
time.sleep(10)
logging.info('Starting scan')
controller.scan(None)
logging.info('Starting sleep')
time.sleep(1000)
logging.info('Exiting ...')
controller.stop()
| 19
| 69
| 0.732265
|
374399b0a4c7b1fec3b718630c621fc550940bf0
| 14,380
|
py
|
Python
|
data_loader/data_loader.py
|
Edelbert/tf2-mobile-2d-single-pose-estimation
|
a6961b2c12e8edfd9b7c4e87d0925c046ff7b673
|
[
"Apache-2.0"
] | null | null | null |
data_loader/data_loader.py
|
Edelbert/tf2-mobile-2d-single-pose-estimation
|
a6961b2c12e8edfd9b7c4e87d0925c046ff7b673
|
[
"Apache-2.0"
] | null | null | null |
data_loader/data_loader.py
|
Edelbert/tf2-mobile-2d-single-pose-estimation
|
a6961b2c12e8edfd9b7c4e87d0925c046ff7b673
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Jaewook Kang (jwkang10@gmail.com) All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# -*- coding: utf-8 -*-
"""Efficient tf-tiny-pose-estimation using tf.data.Dataset.
code ref: https://github.com/edvardHua/PoseEstimationForMobile
"""
from __future__ import absolute_import, division, print_function
from dataflow import RNGDataFlow
import tensorflow as tf
tf.random.set_seed(3)
import os
import math
import jpeg4py as jpeg
import numpy as np
from .dataset_augment import Augmentation
from pycocotools.coco import COCO
import dataflow as D
import cv2
from typing import List, Tuple, Dict, Any
# for coco dataset
from data_loader import dataset_augment
from data_loader.dataset_prepare import CocoMetadata
class DataLoader(object):
"""Generates DataSet input_fn for training or evaluation
Args:
is_training: `bool` for whether the input is for training
data_dir: `str` for the directory of the training and validation data;
if 'null' (the literal string 'null', not None), then construct a null
pipeline, consisting of empty images.
use_bfloat16: If True, use bfloat16 precision; else use float32.
transpose_input: 'bool' for whether to use the double transpose trick
"""
def __init__(self,
config_training,
config_model,
config_preproc,
images_dir_path,
annotation_json_path,
dataset_name = 'COCO'):
self.image_preprocessing_fn = dataset_augment.preprocess_image
self.images_dir_path = images_dir_path
self.annotation_json_path = annotation_json_path
self.annotations_info = None
self.config_training = config_training
self.config_model = config_model
self.config_preproc = config_preproc
self.dataset_name = dataset_name
if images_dir_path == 'null' or images_dir_path == '' or images_dir_path is None:
exit(1)
if annotation_json_path == 'null' or annotation_json_path == '' or annotation_json_path is None:
exit(1)
self.annotations_info = COCO(self.annotation_json_path)
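        # COCO stores each keypoint as an (x, y, visibility) triplet, hence the division by 3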
number_of_keypoints = len(list(self.annotations_info.anns.values())[0]["keypoints"]) / 3
self.number_of_keypoints = int(number_of_keypoints)
self.imgIds = self.annotations_info.getImgIds()
def _set_shapes(self, img, heatmap):
img.set_shape([self.config_training["batch_size"],
self.config_model["input_height"],
self.config_model["input_width"],
3])
heatmap.set_shape([self.config_training["batch_size"],
self.config_model["output_height"],
self.config_model["output_width"],
self.number_of_keypoints])
return img, heatmap
def _parse_function(self, imgId, ann=None):
"""
:param imgId: Tensor
:return:
"""
        try:
            # imgId arrives as a tf.Tensor when called through tf.py_function in input_fn
            imgId = imgId.numpy()
        except AttributeError:
            # already a plain Python value (e.g. when called directly from get_images)
            pass
if ann is not None:
self.annotations_info = ann
image_info = self.annotations_info.loadImgs([imgId])[0]
keypoint_info_ids = self.annotations_info.getAnnIds(imgIds=imgId)
keypoint_infos = self.annotations_info.loadAnns(keypoint_info_ids)
#print(image_info['coco_url'], imgId, keypoint_infos)
image_id = image_info['id']
img_filename = image_info['file_name']
image_filepath = os.path.join(self.images_dir_path, img_filename)
img_meta_data = CocoMetadata(idx=image_id,
img_path=image_filepath,
img_meta=image_info,
keypoint_infos=keypoint_infos,
number_of_heatmap=self.number_of_keypoints,
sigma=self.config_preproc["heatmap_std"],
dataset_name = self.dataset_name)
# print('joint_list = %s' % img_meta_data.joint_list)
images, labels = self.image_preprocessing_fn(img_meta_data=img_meta_data,
config_model=self.config_model,
config_preproc=self.config_preproc,
dataset_name = self.dataset_name)
return images, labels
def input_fn(self, params=None):
"""Input function which provides a single batch for train or eval.
Args:
params: `dict` of parameters passed from the `TPUEstimator`.
`params['batch_size']` is always provided and should be used as the
effective batch size.
Returns:
A `tf.data.Dataset` object.
doc reference: https://www.tensorflow.org/api_docs/python/tf/data/TFRecordDataset
"""
dataset = tf.data.Dataset.from_tensor_slices(self.imgIds)
dataset = dataset.apply(tf.data.experimental.map_and_batch(
map_func=lambda imgId: tuple(
tf.py_function(
func=self._parse_function,
inp=[imgId],
Tout=[tf.float32, tf.float32])),
batch_size=self.config_training["batch_size"],
num_parallel_calls=self.config_training["multiprocessing_num"],
drop_remainder=True))
# cache entire dataset in memory after preprocessing
        # dataset = dataset.cache()  # disabled: caching the full dataset in memory causes OOM
dataset = dataset.map(self._set_shapes,
num_parallel_calls=self.config_training["multiprocessing_num"])
# Prefetch overlaps in-feed with training
        # dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)  # AUTOTUNE requires TF newer than 1.13
dataset = dataset.prefetch(buffer_size=self.config_training["batch_size"] * 3)
# tf.logging.info('[Input_fn] dataset pipeline building complete')
return dataset
def get_images(self, idx, batch_size):
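        # eagerly load a small batch outside the tf.data pipeline (useful for debugging or visualization)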
imgs = []
labels = []
for i in range(batch_size):
img, label = self._parse_function(self.imgIds[i + idx])
#print(np.sum(label))
imgs.append(img)
labels.append(label)
return np.array(imgs), np.array(labels)
class MHPLoader(object):
def __init__(self, dataset: RNGDataFlow, augmentor: Augmentation, config: Dict[Any, Any], train: bool, tf: bool = False, debug: bool = False):
self.dataset = dataset
self.augmentor = augmentor
self.debug = debug
self.config = config
self.train = train
wrapped_dataset = self._wrap_flow(self.dataset)
self.tf = tf
if self.tf:
self.wrapped_dataset = self._wrap_tf()
else:
self.wrapped_dataset = wrapped_dataset
def _get_heatmap(self, pose, img_shape: Tuple[int]):
height, width = img_shape[0], img_shape[1]
heatmap = np.zeros((self.config['num_keypoints'], height, width), dtype = np.float32)
th = 1.6052
delta = math.sqrt(th * 2)
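        # th caps the Gaussian exponent; delta = sqrt(2 * th) is the corresponding radius in units of heatmap_std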
for idx, p in enumerate(pose):
if p[0] < 0 or p[1] < 0:
continue
x0 = int(max(0, p[0] - delta * self.config['heatmap_std']))
y0 = int(max(0, p[1] - delta * self.config['heatmap_std']))
x1 = int(min(width, p[0] + delta * self.config['heatmap_std']))
y1 = int(min(height, p[1] + delta * self.config['heatmap_std']))
for y in range(y0, y1):
for x in range(x0, x1):
d = (x - p[0]) ** 2 + (y - p[1]) ** 2
exp = d / 2.0 / self.config['heatmap_std'] / self.config['heatmap_std']
if exp > th:
continue
heatmap[idx][y][x] = max(heatmap[idx][y][x], math.exp(-exp))
heatmap[idx][y][x] = min(heatmap[idx][y][x], 1.0)
heatmap = heatmap.transpose((1, 2, 0))
return heatmap
def rescale_sample(self, sample, output_size, mean):
image_, pose_ = sample['image']/256.0, sample['pose']
h, w = image_.shape[:2]
im_scale = min(float(output_size[0]) / float(h), float(output_size[1]) / float(w))
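        # resize uniformly so the image fits inside output_size, then pad the borders with the mean color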
new_h = int(image_.shape[0] * im_scale)
new_w = int(image_.shape[1] * im_scale)
image = cv2.resize(image_, (new_w, new_h),
interpolation=cv2.INTER_LINEAR)
left_pad = (output_size[1] - new_w) // 2
right_pad = (output_size[1] - new_w) - left_pad
top_pad = (output_size[0] - new_h) // 2
bottom_pad = (output_size[0] - new_h) - top_pad
pad = ((top_pad, bottom_pad), (left_pad, right_pad))
image = np.stack([np.pad(image[:,:,c], pad, mode='constant', constant_values=mean[c])
for c in range(3)], axis=2)
pose = (pose_.reshape([-1,2])/np.array([w,h])*np.array([new_w,new_h]))
pose += [left_pad, top_pad]
sample['image'] = image
sample['pose'] = pose
return sample, left_pad, top_pad, new_w, new_h
def _read_and_aug(self, dp, augmentor):
fpath, im_info, img_id = dp
#read image
try:
img = jpeg.JPEG(fpath).decode()
except Exception as ex:
            print(f"can't open {fpath} with jpeg4py, falling back to OpenCV reading")
try:
img = cv2.imread(fpath, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
except cv2.error as ex:
print(ex, fpath)
#read keypoints
keypoints = self.gt_points_to_array(im_info[0]['keypoints'])
sample = {'image': img, 'pose': keypoints, 'fpath' : fpath}
mean = np.array([0.485, 0.456, 0.406]).astype(np.float32)
std = np.array([0.229, 0.224, 0.225]).astype(np.float32)
#augment image and keypoint
if augmentor:
sample = augmentor(sample)
if self.debug:
sample['original_img'] = img
sample['original_pose'] = keypoints
#scale image and poses
sample['image'] = sample['image'].astype(np.float32)
sample, left_pad, top_pad, new_w, new_h = self.rescale_sample(sample, (self.config['in_height'], self.config['in_width']), mean)
#create heatmap
sample['heatmap'] = self._get_heatmap(sample['pose'], sample['image'].shape)
#scale to network input
sample['heatmap'] = cv2.resize(sample['heatmap'], (self.config['out_height'], self.config['out_width']))
#sample['heatmap'] = np.clip(sample['heatmap'], 0, 1)
#sample['image'] = cv2.resize(sample['image'], (self.config['in_height'], self.config['in_width']), interpolation=cv2.INTER_AREA)
#print(sample['heatmap'].shape)
if self.debug:
return sample
#print('return')
#print(sample['image'])
sample['image'] = (sample['image']-mean)/(std)
#return sample['image'], sample['heatmap'], img_id, img
sample['img_id'] = img_id
#
if not self.train:
sample['original_img'] = img
sample['left_pad'] = left_pad
sample['top_pad'] = top_pad
sample['new_w'] = new_w
sample['new_h'] = new_h
return sample
def _wrap_flow(self, dataset: RNGDataFlow ) -> RNGDataFlow:
dataset = D.MultiProcessMapData(
dataset,
num_proc=12,
map_func=lambda x: self._read_and_aug(x, self.augmentor),
buffer_size=self.config['batch_size'] * 3,
strict=True,
)
if not self.debug:
if self.train:
dataset = D.RepeatedData(dataset, num = -1)
#dataset = D.LocallyShuffleData(dataset, 2000)
dataset = D.BatchData(dataset, self.config['batch_size'])
dataset.reset_state()
return dataset
def _parse_to_tf(self, img, heatmap):
return img, heatmap
def _wrap_tf(self):
print('wrap tf')
def gen():
for img, heatmap in self.wrapped_dataset:
yield img, heatmap
#print('run gen')
#for data in gen():
#pass
#print('run tf')
dataset = tf.data.Dataset.from_generator(
gen,
output_types = (tf.float32, tf.int16),
output_shapes= (
[self.config['in_height'], self.config['in_width'], 3],
[self.config['out_height'], self.config['out_width'], self.config['num_keypoints']],
),
)
#dataset = dataset.map(lambda x, y: (self._parse_to_tf(x, y)), num_parallel_calls= 12)
dataset = dataset.batch(self.config['batch_size'])
#for i in range(10):
#data = next(dataset)
#print(len(data))
return dataset
def __iter__(self):
for data in self.wrapped_dataset:
yield data
def gt_points_to_array(self, points: List[float]) -> np.ndarray:
output = []
for x, y, v in zip(points[0::3], points[1::3], points[2::3]):
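            # keep only keypoints flagged as visible (v == 2); others get a large negative sentinel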
if v == 2:
output.append(int(x))
output.append(int(y))
else:
output.append(-10000)
output.append(-10000)
return np.array(output).reshape([-1, 2])
| 37.742782
| 146
| 0.576634
|
84ad08d7633ea440ffc3f4f891a35e81f772ab1e
| 4,256
|
py
|
Python
|
ck_airport.py
|
LGinC/checkinpanel
|
01692c6fbad0bf7c16cc180a836f3c2a69253772
|
[
"MIT"
] | 752
|
2021-08-29T11:24:39.000Z
|
2022-03-31T09:22:56.000Z
|
ck_airport.py
|
LGinC/checkinpanel
|
01692c6fbad0bf7c16cc180a836f3c2a69253772
|
[
"MIT"
] | 99
|
2021-08-30T02:15:32.000Z
|
2022-03-30T08:51:06.000Z
|
ck_airport.py
|
LGinC/checkinpanel
|
01692c6fbad0bf7c16cc180a836f3c2a69253772
|
[
"MIT"
] | 280
|
2021-08-29T08:27:44.000Z
|
2022-03-30T09:39:06.000Z
|
# -*- coding: utf-8 -*-
"""
:author @Icrons
cron: 20 10 * * *
new Env('机场签到');
"""
import json
import re
import traceback
import requests
import urllib3
from notify_mtr import send
from utils import get_data
urllib3.disable_warnings()
class SspanelQd(object):
def __init__(self, check_items):
self.check_items = check_items
@staticmethod
def checkin(url, email, password):
url = url.rstrip("/")
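        # URL-encode the "@" in the email address so it survives the form-encoded POST body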
email = email.split("@")
email = email[0] + "%40" + email[1]
session = requests.session()
"""
以下 except 都是用来捕获当 requests 请求出现异常时,
通过捕获然后等待网络情况的变化,以此来保护程序的不间断运行
"""
try:
session.get(url, verify=False)
except requests.exceptions.ConnectionError:
msg = url + "\n" + "网络不通"
return msg
except requests.exceptions.ChunkedEncodingError:
msg = url + "\n" + "分块编码错误"
return msg
except Exception:
msg = url + "\n" + "未知错误,请查看日志"
print(f"未知错误,错误信息:\n{traceback.format_exc()}")
return msg
login_url = url + "/auth/login"
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
}
post_data = "email=" + email + "&passwd=" + password + "&code="
post_data = post_data.encode()
try:
res = session.post(login_url, post_data, headers=headers, verify=False)
res_str = res.text.encode("utf-8").decode("unicode_escape")
print(f"{url} 接口登录返回信息:{res_str}")
res_dict = json.loads(res_str)
if res_dict.get("ret") == 0:
msg = url + "\n" + str(res_dict.get("msg"))
return msg
except Exception:
msg = url + "\n" + "登录失败,请查看日志"
print(f"登录失败,错误信息:\n{traceback.format_exc()}")
return msg
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
"Referer": url + "/user",
}
try:
response = session.post(
url + "/user/checkin", headers=headers, verify=False
)
res_str = response.text.encode("utf-8").decode("unicode_escape")
print(f"{url} 接口签到返回信息:{res_str}")
res_dict = json.loads(res_str)
check_msg = res_dict.get("msg")
if check_msg:
msg = url + "\n" + str(check_msg)
else:
msg = url + "\n" + str(res_dict)
except Exception:
msg = url + "\n" + "签到失败,请查看日志"
print(f"签到失败,错误信息:\n{traceback.format_exc()}")
info_url = url + "/user"
response = session.get(info_url, verify=False)
"""
以下只适配了editXY主题
"""
try:
level = re.findall(r'\["Class", "(.*?)"],', response.text)[0]
day = re.findall(r'\["Class_Expire", "(.*)"],', response.text)[0]
rest = re.findall(r'\["Unused_Traffic", "(.*?)"]', response.text)[0]
msg = (
url
+ "\n- 今日签到信息:"
+ str(msg)
+ "\n- 用户等级:"
+ str(level)
+ "\n- 到期时间:"
+ str(day)
+ "\n- 剩余流量:"
+ str(rest)
)
except Exception:
pass
return msg
def main(self):
msg_all = ""
for check_item in self.check_items:
            # Airport URL
url = str(check_item.get("url"))
            # Login credentials
email = str(check_item.get("email"))
password = str(check_item.get("password"))
if url and email and password:
msg = self.checkin(url=url, email=email, password=password)
else:
msg = "配置错误"
msg_all += msg + "\n\n"
return msg_all
if __name__ == "__main__":
data = get_data()
_check_items = data.get("AIRPORT", [])
res = SspanelQd(check_items=_check_items).main()
send("机场签到", res)
| 31.294118
| 138
| 0.507754
|
f598a2d4811c4993486203d456dd53dd436ea223
| 1,588
|
py
|
Python
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_metadata_service_get_artifact_sync.py
|
lclc19/python-aiplatform
|
d8da2e365277441abadb04328943f23345d72b0e
|
[
"Apache-2.0"
] | 180
|
2020-09-23T17:21:15.000Z
|
2022-03-30T17:25:47.000Z
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_metadata_service_get_artifact_sync.py
|
lclc19/python-aiplatform
|
d8da2e365277441abadb04328943f23345d72b0e
|
[
"Apache-2.0"
] | 601
|
2020-09-23T16:23:44.000Z
|
2022-03-31T19:08:23.000Z
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_metadata_service_get_artifact_sync.py
|
lclc19/python-aiplatform
|
d8da2e365277441abadb04328943f23345d72b0e
|
[
"Apache-2.0"
] | 109
|
2020-09-23T16:22:04.000Z
|
2022-03-28T21:18:29.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetArtifact
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1_MetadataService_GetArtifact_sync]
from google.cloud import aiplatform_v1
def sample_get_artifact():
"""Snippet for get_artifact"""
# Create a client
client = aiplatform_v1.MetadataServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.GetArtifactRequest(
name="projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}",
)
# Make the request
response = client.get_artifact(request=request)
# Handle response
print(response)
# [END aiplatform_generated_aiplatform_v1_MetadataService_GetArtifact_sync]
| 33.083333
| 108
| 0.762594
|
c40dc5c005d9112efaeb35c4cadf0673b809be6b
| 6,485
|
py
|
Python
|
var/spack/repos/builtin/packages/py-azureml-dataprep-rslex/package.py
|
vchuravy/spack
|
f74670e210dc9b1996be2ca2932fc465fb8ebe9e
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/py-azureml-dataprep-rslex/package.py
|
vchuravy/spack
|
f74670e210dc9b1996be2ca2932fc465fb8ebe9e
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 12
|
2021-05-12T05:54:41.000Z
|
2022-03-30T11:09:24.000Z
|
var/spack/repos/builtin/packages/py-azureml-dataprep-rslex/package.py
|
vchuravy/spack
|
f74670e210dc9b1996be2ca2932fc465fb8ebe9e
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from spack import *
class PyAzuremlDataprepRslex(Package):
"""Azure Machine Learning Data Prep RsLex is a Rust implementation of Data Prep's
capabilities to load, transform, and write data for machine learning workflows."""
homepage = "http://aka.ms/data-prep-sdk"
if sys.platform == 'darwin':
version('1.9.0-py3.9', sha256='9bdaa31d129dac19ee20d5a3aad1726397e90d8d741b4f6de4554040800fefe8', expand=False,
url='https://pypi.io/packages/cp39/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl')
version('1.9.0-py3.8', sha256='9b2e741ac1c53d3f7e6061d264feccf157d97e404c772933a176e6021014484e', expand=False, preferred=True,
url='https://pypi.io/packages/cp38/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl')
version('1.9.0-py3.7', sha256='9993b369fb9d94d885611859ee957582304c1d8953fc8b48567b786bbfd8062b', expand=False,
url='https://pypi.io/packages/cp37/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl')
version('1.9.0-py3.6', sha256='80d518774591deb2c8f1457708c10c9ba348407d7aa49e0710358f46846fcbef', expand=False,
url='https://pypi.io/packages/cp36/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl')
version('1.9.0-py3.5', sha256='91a5c09796e60570620efb7d66f05647557ec6d39aab8b22c0e13926c402ca5b', expand=False,
url='https://pypi.io/packages/cp35/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp35-cp35m-macosx_10_9_x86_64.whl')
version('1.8.0-py3.9', sha256='677c25a7e23ec7f91d25aa596f382f7f3b6d60fbc3258bead2b2a6aa42f3a16d', expand=False,
url='https://pypi.io/packages/cp39/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl')
version('1.8.0-py3.8', sha256='d7f2dec06296544b1707f5b01c6a4eaad744b4abfe9e8e89830b561c84d95a7a', expand=False,
url='https://pypi.io/packages/cp38/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl')
version('1.8.0-py3.7', sha256='8e9feb3187f11fb86f525bc88bf6a6171d7e7d6e2860411a5b82d1f3ecaa8ae8', expand=False,
url='https://pypi.io/packages/cp37/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl')
version('1.8.0-py3.6', sha256='f5f7c9af1f1ecfbfee0e5822db180de05c6f5aeed34f6d0b3fd26e210f476d3e', expand=False,
url='https://pypi.io/packages/cp36/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp36-cp36m-macosx_10_9_x86_64.whl')
version('1.8.0-py3.5', sha256='1c610a25a3e09d4ebb95c42baaa57b5c0c66e31522a6bff52dda0df2d6ac7f4d', expand=False,
url='https://pypi.io/packages/cp35/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp35-cp35m-macosx_10_9_x86_64.whl')
elif sys.platform.startswith('linux'):
version('1.9.0-py3.9', sha256='79d52bb427e3ca781a645c4f11f7a8e5e2c8f61e61bfc162b4062d8e47bcf3d6', expand=False,
url='https://pypi.io/packages/cp39/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp39-cp39-manylinux1_x86_64.whl')
version('1.9.0-py3.8', sha256='a52461103b45867dd919bab593bb6f2426c9b5f5a435081e82a3c57c54c3add6', expand=False, preferred=True,
url='https://pypi.io/packages/cp38/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp38-cp38-manylinux1_x86_64.whl')
version('1.9.0-py3.7', sha256='d7b6e15401b88cec2915b0bd6298ae7f54584d01ee14e4a24ffb950b7578bceb', expand=False,
url='https://pypi.io/packages/cp37/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp37-cp37m-manylinux1_x86_64.whl')
version('1.9.0-py3.6', sha256='2723bf56f2d11e5ee00c6619f2365bd594e85ba116ffc912a2433c52913d0890', expand=False,
url='https://pypi.io/packages/cp36/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp36-cp36m-manylinux1_x86_64.whl')
version('1.9.0-py3.5', sha256='d5c6d363da2b3ace1baa9ad3e645ad8a19fdacf0b95dd1f8b6ab19c4371cc10f', expand=False,
url='https://pypi.io/packages/cp35/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.9.0-cp35-cp35m-manylinux1_x86_64.whl')
version('1.8.0-py3.9', sha256='e251a077669703ca117b157b225fbc20832169f913476cf79c01a5c6f8ff7a50', expand=False,
url='https://pypi.io/packages/cp39/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp39-cp39-manylinux1_x86_64.whl')
version('1.8.0-py3.8', sha256='2ebfa164f0933a5cec383cd27ba10d33861a73237ef481ada5a9a822bb55514a', expand=False,
url='https://pypi.io/packages/cp38/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp38-cp38-manylinux1_x86_64.whl')
version('1.8.0-py3.7', sha256='0588c6e503635aa6d4c64f7bbb3a3be52679f24ac89e2c8d4e96fd991d7006a2', expand=False,
url='https://pypi.io/packages/cp37/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp37-cp37m-manylinux1_x86_64.whl')
version('1.8.0-py3.6', sha256='195507ba55aa5ac7c5d37d05b8ac25813add0da5cc9bd4a04f2cb5da984cb287', expand=False,
url='https://pypi.io/packages/cp36/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp36-cp36m-manylinux1_x86_64.whl')
version('1.8.0-py3.5', sha256='9dfbd1065030dee3aa45b6796c087acffb06cfcbe97cc877e255e21e320362be', expand=False,
url='https://pypi.io/packages/cp35/a/azureml_dataprep_rslex/azureml_dataprep_rslex-1.8.0-cp35-cp35m-manylinux1_x86_64.whl')
extends('python')
depends_on('py-pip', type='build')
depends_on('python@3.9.0:3.9.999', when='@1.9.0-py3.9,1.8.0-py3.9', type=('build', 'run'))
depends_on('python@3.8.0:3.8.999', when='@1.9.0-py3.8,1.8.0-py3.8', type=('build', 'run'))
depends_on('python@3.7.0:3.7.999', when='@1.9.0-py3.7,1.8.0-py3.7', type=('build', 'run'))
depends_on('python@3.6.0:3.6.999', when='@1.9.0-py3.6,1.8.0-py3.6', type=('build', 'run'))
depends_on('python@3.5.0:3.5.999', when='@1.9.0-py3.5,1.8.0-py3.5', type=('build', 'run'))
def install(self, spec, prefix):
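        # install the pre-built wheel (fetched as the package archive) into the Spack prefix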
pip = which('pip')
pip('install', self.stage.archive_file, '--prefix={0}'.format(prefix))
| 87.635135
| 140
| 0.74017
|
cd764522f096603697fc91268d5c7d3870d760f2
| 782
|
py
|
Python
|
YoloV1Train.py
|
cersar/BasicNetwork
|
119ebb745e67a9b74b72cc4635fea360db0ed43f
|
[
"MIT"
] | 4
|
2019-01-02T07:54:51.000Z
|
2019-01-04T06:11:15.000Z
|
YoloV1Train.py
|
cersar/BasicNetwork
|
119ebb745e67a9b74b72cc4635fea360db0ed43f
|
[
"MIT"
] | null | null | null |
YoloV1Train.py
|
cersar/BasicNetwork
|
119ebb745e67a9b74b72cc4635fea360db0ed43f
|
[
"MIT"
] | null | null | null |
from network.YoloV1 import YoloV1
from model.train import fit
from util.data_util import load_data,preprocess_data
import tensorflow as tf
voc_labels = ["aeroplane", "bicycle", "bird", "boat", "bottle",
"bus", "car", "cat", "chair", "cow", "diningtable", "dog",
"horse", "motorbike", "person", "pottedplant", "sheep", "sofa",
"train", "tvmonitor"]
net = YoloV1((448, 448, 3))
trainable_variables = net.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)[-2:-1]
images,labels = load_data('dataset/yolo_train_data',mode='train')
X,Y=preprocess_data(images,labels,mode='train')
fit(net, X, Y, 10, 200,
    trainable_list=trainable_variables,
    pretrained_weight=r'E:\Myproject\python\yolov1\model\yolov1/model.ckpt',
    save_model_dir='model_saved/yolov1/model.ckpt', lr=1e-4)
| 43.444444
| 81
| 0.727621
|
645c17e3f4a6b865217e0c673edc9e8119b485f8
| 6,116
|
py
|
Python
|
tests/test_envs.py
|
SwapnilPande/stable-baselines3
|
93f62cef7c44295f7e82c751a8a7b23653361313
|
[
"MIT"
] | 3,204
|
2020-05-05T08:27:19.000Z
|
2022-03-31T12:23:31.000Z
|
tests/test_envs.py
|
SwapnilPande/stable-baselines3
|
93f62cef7c44295f7e82c751a8a7b23653361313
|
[
"MIT"
] | 739
|
2020-05-08T15:03:44.000Z
|
2022-03-31T20:16:02.000Z
|
tests/test_envs.py
|
SwapnilPande/stable-baselines3
|
93f62cef7c44295f7e82c751a8a7b23653361313
|
[
"MIT"
] | 807
|
2020-05-09T04:06:04.000Z
|
2022-03-31T05:54:28.000Z
|
import types
import gym
import numpy as np
import pytest
from gym import spaces
from stable_baselines3.common.env_checker import check_env
from stable_baselines3.common.envs import (
BitFlippingEnv,
FakeImageEnv,
IdentityEnv,
IdentityEnvBox,
IdentityEnvMultiBinary,
IdentityEnvMultiDiscrete,
SimpleMultiObsEnv,
)
ENV_CLASSES = [
BitFlippingEnv,
IdentityEnv,
IdentityEnvBox,
IdentityEnvMultiBinary,
IdentityEnvMultiDiscrete,
FakeImageEnv,
SimpleMultiObsEnv,
]
@pytest.mark.parametrize("env_id", ["CartPole-v0", "Pendulum-v0"])
def test_env(env_id):
"""
    Check that environments integrated in Gym pass the test.
:param env_id: (str)
"""
env = gym.make(env_id)
with pytest.warns(None) as record:
check_env(env)
# Pendulum-v0 will produce a warning because the action space is
# in [-2, 2] and not [-1, 1]
if env_id == "Pendulum-v0":
assert len(record) == 1
else:
# The other environments must pass without warning
assert len(record) == 0
@pytest.mark.parametrize("env_class", ENV_CLASSES)
def test_custom_envs(env_class):
env = env_class()
with pytest.warns(None) as record:
check_env(env)
# No warnings for custom envs
assert len(record) == 0
@pytest.mark.parametrize(
"kwargs",
[
dict(continuous=True),
dict(discrete_obs_space=True),
dict(image_obs_space=True, channel_first=True),
dict(image_obs_space=True, channel_first=False),
],
)
def test_bit_flipping(kwargs):
# Additional tests for BitFlippingEnv
env = BitFlippingEnv(**kwargs)
with pytest.warns(None) as record:
check_env(env)
# No warnings for custom envs
assert len(record) == 0
def test_high_dimension_action_space():
"""
Test for continuous action space
with more than one action.
"""
env = FakeImageEnv()
# Patch the action space
env.action_space = spaces.Box(low=-1, high=1, shape=(20,), dtype=np.float32)
# Patch to avoid error
def patched_step(_action):
return env.observation_space.sample(), 0.0, False, {}
env.step = patched_step
check_env(env)
@pytest.mark.parametrize(
"new_obs_space",
[
# Small image
spaces.Box(low=0, high=255, shape=(32, 32, 3), dtype=np.uint8),
# Range not in [0, 255]
spaces.Box(low=0, high=1, shape=(64, 64, 3), dtype=np.uint8),
# Wrong dtype
spaces.Box(low=0, high=255, shape=(64, 64, 3), dtype=np.float32),
# Not an image, it should be a 1D vector
spaces.Box(low=-1, high=1, shape=(64, 3), dtype=np.float32),
# Tuple space is not supported by SB
spaces.Tuple([spaces.Discrete(5), spaces.Discrete(10)]),
# Nested dict space is not supported by SB3
spaces.Dict({"position": spaces.Dict({"abs": spaces.Discrete(5), "rel": spaces.Discrete(2)})}),
# Small image inside a dict
spaces.Dict({"img": spaces.Box(low=0, high=255, shape=(32, 32, 3), dtype=np.uint8)}),
],
)
def test_non_default_spaces(new_obs_space):
env = FakeImageEnv()
env.observation_space = new_obs_space
# Patch methods to avoid errors
env.reset = new_obs_space.sample
def patched_step(_action):
return new_obs_space.sample(), 0.0, False, {}
env.step = patched_step
with pytest.warns(UserWarning):
check_env(env)
def check_reset_assert_error(env, new_reset_return):
"""
Helper to check that the error is caught.
:param env: (gym.Env)
:param new_reset_return: (Any)
"""
def wrong_reset():
return new_reset_return
# Patch the reset method with a wrong one
env.reset = wrong_reset
with pytest.raises(AssertionError):
check_env(env)
def test_common_failures_reset():
"""
    Test that common failure cases of the `reset` method are caught
"""
env = IdentityEnvBox()
# Return an observation that does not match the observation_space
check_reset_assert_error(env, np.ones((3,)))
# The observation is not a numpy array
check_reset_assert_error(env, 1)
# Return not only the observation
check_reset_assert_error(env, (env.observation_space.sample(), False))
env = SimpleMultiObsEnv()
obs = env.reset()
def wrong_reset(self):
return {"img": obs["img"], "vec": obs["img"]}
env.reset = types.MethodType(wrong_reset, env)
with pytest.raises(AssertionError) as excinfo:
check_env(env)
# Check that the key is explicitly mentioned
assert "vec" in str(excinfo.value)
def check_step_assert_error(env, new_step_return=()):
"""
Helper to check that the error is caught.
:param env: (gym.Env)
:param new_step_return: (tuple)
"""
def wrong_step(_action):
return new_step_return
# Patch the step method with a wrong one
env.step = wrong_step
with pytest.raises(AssertionError):
check_env(env)
def test_common_failures_step():
"""
Test that common failure cases of the `step` method are caught
"""
env = IdentityEnvBox()
# Wrong shape for the observation
check_step_assert_error(env, (np.ones((4,)), 1.0, False, {}))
# Obs is not a numpy array
check_step_assert_error(env, (1, 1.0, False, {}))
# Return a wrong reward
check_step_assert_error(env, (env.observation_space.sample(), np.ones(1), False, {}))
# Info dict is not returned
check_step_assert_error(env, (env.observation_space.sample(), 0.0, False))
# Done is not a boolean
check_step_assert_error(env, (env.observation_space.sample(), 0.0, 3.0, {}))
check_step_assert_error(env, (env.observation_space.sample(), 0.0, 1, {}))
env = SimpleMultiObsEnv()
obs = env.reset()
def wrong_step(self, action):
return {"img": obs["vec"], "vec": obs["vec"]}, 0.0, False, {}
env.step = types.MethodType(wrong_step, env)
with pytest.raises(AssertionError) as excinfo:
check_env(env)
# Check that the key is explicitly mentioned
assert "img" in str(excinfo.value)
| 27.8
| 103
| 0.658437
|
70aa57d1cb693e044cda5c6617ece33af649a464
| 603
|
py
|
Python
|
what_is_Monero/tests/test_what_is_monero.py
|
Sleeptraphero/ProjectSE
|
8cc406c9a628b9849dfc778526b01ce8baea21b0
|
[
"MIT"
] | null | null | null |
what_is_Monero/tests/test_what_is_monero.py
|
Sleeptraphero/ProjectSE
|
8cc406c9a628b9849dfc778526b01ce8baea21b0
|
[
"MIT"
] | null | null | null |
what_is_Monero/tests/test_what_is_monero.py
|
Sleeptraphero/ProjectSE
|
8cc406c9a628b9849dfc778526b01ce8baea21b0
|
[
"MIT"
] | null | null | null |
from what_is_Monero.website import app
def test_index():
# create a version of our website that we can use for testing
with app.test_client() as test_client:
# mimic a browser: 'GET /', as if you visit the site
response = test_client.get('/')
# check that the HTTP response is a success
assert response.status_code == 200
# Store the contents of the html response in a local variable.
# This should be a string with the same content as the file index.html
html_content = response.data.decode()
assert "<html>" in html_content
| 33.5
| 78
| 0.668325
|
59c5f5df71924f0c1b252edff515fcfc9543d626
| 3,413
|
py
|
Python
|
jdleden/afdelingen.py
|
jonge-democraten/jdleden
|
f734796770f09d90928acb5d9790210243d8d024
|
[
"MIT"
] | null | null | null |
jdleden/afdelingen.py
|
jonge-democraten/jdleden
|
f734796770f09d90928acb5d9790210243d8d024
|
[
"MIT"
] | 5
|
2016-11-13T20:57:03.000Z
|
2020-06-05T17:35:45.000Z
|
jdleden/afdelingen.py
|
jonge-democraten/jdleden
|
f734796770f09d90928acb5d9790210243d8d024
|
[
"MIT"
] | 1
|
2017-01-14T15:05:41.000Z
|
2017-01-14T15:05:41.000Z
|
# All departments with their postal code ranges
AFDELINGEN = {
"Amsterdam":[
(1000,1229),
(1245,1392),
(1395,2159),
(2165,2165),
(3640,3650),
(8200,8249)],
"Leiden-Haaglanden":[
(2160,2164),
(2166,2750),
(2752,2760),
(2762,2799),
(3465,3466),
(3651,3653)],
"Rotterdam":[
(2751,2751),
(2761,2761),
(2800,3399),
(4200,4229),
(4248,4299)],
"Utrecht":[
(1230,1244),
(1393,1394),
(3400,3464),
(3467,3639),
(3654,3769),
(3777,3779),
(3782,3783),
(3786,3789),
(3793,3793),
(3795,3885),
(3887,3887),
(3889,3899),
(3908,3909),
(3913,3920),
(3923,3924),
(3926,3926),
(3928,3930),
(3932,3999),
(4008,4009),
(4015,4015),
(4018,4019),
(4022,4022),
(4025,4029),
(4034,4039),
(4055,4061),
(4063,4109),
(4113,4114),
(4118,4118),
(4120,4196),
(4198,4199),
(4230,4247),
(6746,6799),
(8250,8299)],
"Brabant":[
(4300,5339),
(5400,5409),
(5412,5437),
(5442,5443),
(5450,5767),
(5769,5799),
(6624,6624)],
"Arnhem-Nijmegen":[
(4042,4042),
(4044,4049),
(4052,4052),
(5340,5399),
(5410,5411),
(5438,5441),
(5444,5449),
(5805,5863),
(5873,5899),
(6500,6623),
(6625,6668),
(6670,6699),
(6719,6719),
(6722,6729),
(6734,6739),
(6742,6743),
(6800,6876),
(6878,6959),
(6976,7199),
(7208,7274)],
"Limburg":[
(5768,5768),
(5800,5804),
(5864,5872),
(5900,6499)],
"Overijssel":[
(3888,3888),
(6960,6975),
(7200,7207),
(7275,7350),
(7353,7704),
(7706,7714),
(7716,7739),
(7770,7799),
(7947,7969),
(8056,8065),
(8000,8054),
(8067,8099),
(8055,8055),
(8100,8199)],
"Groningen":[
(7705,7705),
(7715,7715),
(7740,7769),
(7800,7945),
(7970,7999),
(8350,8354),
(8380,8387),
(8437,8439),
(9300,9850),
(9854,9869),
(9874,9999)],
"Friesland":[
(7946,7946),
(8066,8066),
(8300,8349),
(8355,8379),
(8388,8436),
(8440,9299),
(9851,9853),
(9870,9873)],
"Wageningen":[
(3770,3776),
(3780,3781),
(3784,3785),
(3790,3792),
(3794,3794),
(3886,3886),
(3900,3907),
(3910,3912),
(3921,3922),
(3925,3925),
(3927,3927),
(3931,3931),
(4000,4007),
(4010,4014),
(4016,4017),
(4020,4021),
(4023,4024),
(4030,4033),
(4040,4041),
(4043,4043),
(4050,4051),
(4053,4054),
(4062,4062),
(4110,4112),
(4115,4117),
(4119,4119),
(4197,4197),
(6669,6669),
(6700,6718),
(6720,6721),
(6730,6733),
(6740,6741),
(6744,6745),
(6877,6877),
(7351,7352)],
}
| 20.810976
| 47
| 0.39496
|
8f142b2cf60312f740ffb8eda7fc236343bc8bae
| 897
|
py
|
Python
|
exercises/zh/test_01_12_03.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085
|
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/zh/test_01_12_03.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79
|
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/zh/test_01_12_03.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361
|
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
def test():
assert (
len(pattern) == 3
), "模板应该描述了三个词符(三个字典)。"
assert (
isinstance(pattern[0], dict)
and isinstance(pattern[1], dict)
and isinstance(pattern[2], dict)
), "模板中的每一项应该是一个字典。"
assert (
len(pattern[0]) == 1 and len(pattern[1]) == 1
), "首两个模板的里面应该只有一个键。"
assert len(pattern[2]) == 2, "第三个模板里面应该有两个键。"
assert any(
pattern[0].get(key) == "ADJ" for key in ["pos", "POS"]
), "你有用正确的标签去匹配第一个词符的词性标注了吗?"
assert any(
pattern[1].get(key) == "NOUN" for key in ["pos", "POS"]
), "你有用正确的标签去匹配第二个词符的词性标注了吗?"
assert any(
pattern[2].get(key) == "NOUN" for key in ["pos", "POS"]
), "你有用正确的标签去匹配第三个词符的词性标注了吗?"
assert (
pattern[2].get("OP") == "?"
), "你有对第三个词符使用了正确的运算符吗?"
__msg__.good(
"好极了!你刚刚写了不少挺复杂的模板!"
"我们可以继续下一章,看看还能用spaCy做哪些更多更先进的文本分析。"
)
| 28.935484
| 63
| 0.554069
|
cf43d11e973524da72868ed87c0f991f07ae2a41
| 124
|
py
|
Python
|
async/logger.py
|
harshal/django-async
|
ec350246a42a75bce2f22a70444420ef1fe741f4
|
[
"BSL-1.0"
] | null | null | null |
async/logger.py
|
harshal/django-async
|
ec350246a42a75bce2f22a70444420ef1fe741f4
|
[
"BSL-1.0"
] | 1
|
2019-11-06T17:11:05.000Z
|
2019-11-11T01:58:05.000Z
|
async/logger.py
|
harshal/django-async
|
ec350246a42a75bce2f22a70444420ef1fe741f4
|
[
"BSL-1.0"
] | 1
|
2019-11-06T13:29:36.000Z
|
2019-11-06T13:29:36.000Z
|
"""
Django Async logger.
"""
#pylint: disable=unused-import
import logging
_logger = logging.getLogger('async_logger')
| 15.5
| 43
| 0.725806
|
290168c3505d7e16704ad140fd9c8de5232aedca
| 117
|
py
|
Python
|
.history/calculator_factories_20210629130529.py
|
Aleff13/calculadora-tkinter
|
01e169d3c1d128976eb3a41ea1f53f11d6157e44
|
[
"MIT"
] | null | null | null |
.history/calculator_factories_20210629130529.py
|
Aleff13/calculadora-tkinter
|
01e169d3c1d128976eb3a41ea1f53f11d6157e44
|
[
"MIT"
] | null | null | null |
.history/calculator_factories_20210629130529.py
|
Aleff13/calculadora-tkinter
|
01e169d3c1d128976eb3a41ea1f53f11d6157e44
|
[
"MIT"
] | null | null | null |
import tkinter as tk
def make_root() -> tk.Tk:
root = tk.Tk()
root.title("Calculator")
    root.config(padx=10)  # padding value is an assumed placeholder; the original call was incomplete
    return root
| 19.5
| 28
| 0.632479
|
b49abf357034c2fc21bd404c197d119425d93fd0
| 8,581
|
py
|
Python
|
catkin_tools/verbs/catkin_config/cli.py
|
timonegk/catkin_tools
|
70ca62f67dc125e5879864a7a80c261b9a9bc914
|
[
"Apache-2.0"
] | null | null | null |
catkin_tools/verbs/catkin_config/cli.py
|
timonegk/catkin_tools
|
70ca62f67dc125e5879864a7a80c261b9a9bc914
|
[
"Apache-2.0"
] | null | null | null |
catkin_tools/verbs/catkin_config/cli.py
|
timonegk/catkin_tools
|
70ca62f67dc125e5879864a7a80c261b9a9bc914
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from catkin_tools.argument_parsing import add_cmake_and_make_and_catkin_make_args
from catkin_tools.argument_parsing import add_context_args
from catkin_tools.context import Context
from catkin_tools.terminal_color import ColorMapper, sanitize
color_mapper = ColorMapper()
clr = color_mapper.clr
def prepare_arguments(parser):
parser.description = "This verb is used to configure a catkin workspace's\
configuration and layout. Calling `catkin config` with no arguments will\
display the current config and affect no changes if a config already exists\
for the current workspace and profile."
# Workspace / profile args
add_context_args(parser)
behavior_group = parser.add_argument_group('Behavior', 'Options affecting argument handling.')
add = behavior_group.add_mutually_exclusive_group().add_argument
add('--append-args', '-a', action='store_true', default=False,
help='For list-type arguments, append elements.')
add('--remove-args', '-r', action='store_true', default=False,
help='For list-type arguments, remove elements.')
context_group = parser.add_argument_group('Workspace Context', 'Options affecting the context of the workspace.')
add = context_group.add_argument
add('--init', action='store_true', default=False,
help='Initialize a workspace if it does not yet exist.')
add = context_group.add_mutually_exclusive_group().add_argument
add('--extend', '-e', dest='extend_path', type=str,
help='Explicitly extend the result-space of another catkin workspace, '
'overriding the value of $CMAKE_PREFIX_PATH.')
add('--no-extend', dest='extend_path', action='store_const', const='',
help='Un-set the explicit extension of another workspace as set by --extend.')
add = context_group.add_argument
add('--mkdirs', action='store_true', default=False,
help='Create directories required by the configuration (e.g. source space) if they do not already exist.')
create_group = parser.add_argument_group(
'Package Create Defaults', 'Information of default authors/maintainers of created packages')
add = create_group.add_mutually_exclusive_group().add_argument
add('--authors', metavar=('NAME', 'EMAIL'), dest='authors', nargs='+', required=False, type=str, default=None,
help='Set the default authors of created packages')
add('--maintainers', metavar=('NAME', 'EMAIL'), dest='maintainers', nargs='+',
required=False, type=str, default=None,
help='Set the default maintainers of created packages')
add('--licenses', metavar=('LICENSE'), dest='licenses', nargs='+', required=False, type=str, default=None,
help='Set the default licenses of created packages')
lists_group = parser.add_argument_group(
'Package Build Defaults', 'Packages to include or exclude from default build behavior.')
add = lists_group.add_mutually_exclusive_group().add_argument
add('--whitelist', metavar="PKG", dest='whitelist', nargs="+", required=False, type=str, default=None,
help='Set the packages on the whitelist. If the whitelist is non-empty, '
'only the packages on the whitelist are built with a bare call to '
'`catkin build`.')
add('--no-whitelist', dest='whitelist', action='store_const', const=[], default=None,
help='Clear all packages from the whitelist.')
add = lists_group.add_mutually_exclusive_group().add_argument
add('--blacklist', metavar="PKG", dest='blacklist', nargs="+", required=False, type=str, default=None,
help='Set the packages on the blacklist. Packages on the blacklist are '
'not built with a bare call to `catkin build`.')
add('--no-blacklist', dest='blacklist', action='store_const', const=[], default=None,
help='Clear all packages from the blacklist.')
spaces_group = parser.add_argument_group('Spaces', 'Location of parts of the catkin workspace.')
Context.setup_space_keys()
for space, space_dict in Context.SPACES.items():
add = spaces_group.add_mutually_exclusive_group().add_argument
flags = ['--{}-space'.format(space)]
flags.extend([space_dict['short_flag']] if 'short_flag' in space_dict else [])
add(*flags, default=None,
help='The path to the {} space.'.format(space))
add('--default-{}-space'.format(space),
action='store_const', dest='{}_space'.format(space), default=None, const=space_dict['default'],
help='Use the default path to the {} space ("{}")'.format(space, space_dict['default']))
add = spaces_group.add_argument
add('-x', '--space-suffix',
help='Suffix for build, devel, and install space if they are not otherwise explicitly set.')
devel_group = parser.add_argument_group(
'Devel Space', 'Options for configuring the structure of the devel space.')
add = devel_group.add_mutually_exclusive_group().add_argument
add('--link-devel', dest='devel_layout', action='store_const', const='linked', default=None,
help='Build products from each catkin package into isolated spaces,'
' then symbolically link them into a merged devel space.')
add('--merge-devel', dest='devel_layout', action='store_const', const='merged', default=None,
help='Build products from each catkin package into a single merged devel spaces.')
add('--isolate-devel', dest='devel_layout', action='store_const', const='isolated', default=None,
help='Build products from each catkin package into isolated devel spaces.')
install_group = parser.add_argument_group(
'Install Space', 'Options for configuring the structure of the install space.')
add = install_group.add_mutually_exclusive_group().add_argument
add('--install', action='store_true', default=None,
help='Causes each package to be installed to the install space.')
add('--no-install', dest='install', action='store_false', default=None,
help='Disables installing each package into the install space.')
add = install_group.add_mutually_exclusive_group().add_argument
add('--isolate-install', action='store_true', default=None,
help='Install each catkin package into a separate install space.')
add('--merge-install', dest='isolate_install', action='store_false', default=None,
help='Install each catkin package into a single merged install space.')
build_group = parser.add_argument_group('Build Options', 'Options for configuring the way packages are built.')
add_cmake_and_make_and_catkin_make_args(build_group)
return parser
def main(opts):
try:
# Determine if the user is trying to perform some action, in which
# case, the workspace should be automatically initialized
ignored_opts = ['main', 'verb']
actions = [v for k, v in vars(opts).items() if k not in ignored_opts]
no_action = not any(actions)
# Try to find a metadata directory to get context defaults
# Otherwise use the specified directory
context = Context.load(
opts.workspace,
opts.profile,
opts,
append=opts.append_args,
remove=opts.remove_args)
do_init = opts.init or not no_action
summary_notes = []
if not context.initialized() and do_init:
summary_notes.append(clr('@!@{cf}Initialized new catkin workspace in `%s`@|' % sanitize(context.workspace)))
if context.initialized() or do_init:
Context.save(context)
if opts.mkdirs and not context.source_space_exists():
os.makedirs(context.source_space_abs)
print(context.summary(notes=summary_notes))
except IOError as exc:
# Usually happens if workspace is already underneath another catkin_tools workspace
print('error: could not configure catkin workspace: %s' % exc.message)
return 1
return 0
| 50.775148
| 120
| 0.701084
|
41893adb5a6d80070ffa1272a104f71a8c20d80f
| 1,331
|
py
|
Python
|
app/migrations/versions/e96865e4fa62_users_added_marks.py
|
sachisabya28/quiz-app-beta
|
5bc5ee33dc9025a0bc23d541bfca6a318ea32c26
|
[
"MIT"
] | 3
|
2021-05-31T16:46:00.000Z
|
2022-01-16T00:19:37.000Z
|
app/migrations/versions/e96865e4fa62_users_added_marks.py
|
sachisabya28/quiz-app-beta
|
5bc5ee33dc9025a0bc23d541bfca6a318ea32c26
|
[
"MIT"
] | null | null | null |
app/migrations/versions/e96865e4fa62_users_added_marks.py
|
sachisabya28/quiz-app-beta
|
5bc5ee33dc9025a0bc23d541bfca6a318ea32c26
|
[
"MIT"
] | null | null | null |
"""users added marks
Revision ID: e96865e4fa62
Revises: 59069a416ef2
Create Date: 2020-09-23 14:29:28.764869
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e96865e4fa62'
down_revision = '59069a416ef2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('questions',
sa.Column('q_id', sa.Integer(), nullable=False),
sa.Column('ques', sa.String(length=350), nullable=True),
sa.Column('a', sa.String(length=100), nullable=True),
sa.Column('b', sa.String(length=100), nullable=True),
sa.Column('c', sa.String(length=100), nullable=True),
sa.Column('d', sa.String(length=100), nullable=True),
sa.Column('ans', sa.String(length=100), nullable=True),
sa.PrimaryKeyConstraint('q_id'),
sa.UniqueConstraint('ques')
)
op.add_column('user', sa.Column('marks', sa.Integer(), nullable=True))
op.create_index(op.f('ix_user_marks'), 'user', ['marks'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_user_marks'), table_name='user')
op.drop_column('user', 'marks')
op.drop_table('questions')
# ### end Alembic commands ###
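
# Illustrative sketch (not part of this revision): a plain SQLAlchemy declarative model
# mirroring the ``questions`` table created in upgrade() above; the migration also adds an
# indexed integer ``marks`` column to the existing ``user`` table. The class name and the
# use of declarative_base here are assumptions made for illustration only -- the real
# application models are defined elsewhere in the app and may differ.
from sqlalchemy.ext.declarative import declarative_base

_Base = declarative_base()


class QuestionSketch(_Base):
    __tablename__ = 'questions'
    q_id = sa.Column(sa.Integer, primary_key=True)
    ques = sa.Column(sa.String(350), unique=True)
    a = sa.Column(sa.String(100))
    b = sa.Column(sa.String(100))
    c = sa.Column(sa.String(100))
    d = sa.Column(sa.String(100))
    ans = sa.Column(sa.String(100))
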
| 30.953488
| 75
| 0.674681
|
6da33cbb0da7d57fc51c3919b70252f723b95373
| 26,910
|
py
|
Python
|
backdoor.py
|
Rainism/the-backdoor-factory
|
6237b9ff84c9ca83e9bdedb9cc93a4865c776bc0
|
[
"BSD-3-Clause"
] | null | null | null |
backdoor.py
|
Rainism/the-backdoor-factory
|
6237b9ff84c9ca83e9bdedb9cc93a4865c776bc0
|
[
"BSD-3-Clause"
] | null | null | null |
backdoor.py
|
Rainism/the-backdoor-factory
|
6237b9ff84c9ca83e9bdedb9cc93a4865c776bc0
|
[
"BSD-3-Clause"
] | 1
|
2020-03-19T04:04:09.000Z
|
2020-03-19T04:04:09.000Z
|
#!/usr/bin/env python
'''
BackdoorFactory (BDF) v2 - Tertium Quid
Many thanks to Ryan O'Neill --ryan 'at' codeslum <d ot> org--
Without him, I would still be trying to do stupid things
with the elf format.
Also thanks to Silvio Cesare with his 1998 paper
(http://vxheaven.org/lib/vsc01.html) which these ELF patching
techniques are based on.
Special thanks to Travis Morrow for poking holes in my ideas.
Copyright (c) 2013-2015, Joshua Pitts
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
import sys
import os
import signal
import time
from random import choice
from optparse import OptionParser
from pebin import pebin
from elfbin import elfbin
from machobin import machobin
def signal_handler(signal, frame):
print '\nProgram Exit'
sys.exit(0)
class bdfMain():
version = """\
2.4.1
"""
author = """\
Author: Joshua Pitts
Email: the.midnite.runr[a t]gmail<d o t>com
Twitter: @midnite_runr
"""
#ASCII ART
menu = ["-.(`-') (`-') _ <-"
".(`-') _(`-') (`-')\n"
"__( OO) (OO ).-/ _ __( OO)"
"( (OO ).-> .-> .-> <-.(OO ) \n"
"'-'---.\ / ,---. \-,-----.'-'. ,--"
".\ .'_ (`-')----. (`-')----. ,------,) \n"
"| .-. (/ | \ /`.\ | .--./| .' /"
"'`'-..__)( OO).-. '( OO).-. '| /`. ' \n"
"| '-' `.) '-'|_.' | /_) (`-')| /)"
"| | ' |( _) | | |( _) | | || |_.' | \n"
"| /`'. |(| .-. | || |OO )| . ' |"
" | / : \| |)| | \| |)| || . .' \n"
"| '--' / | | | |(_' '--'\| |\ \|"
" '-' / ' '-' ' ' '-' '| |\ \ \n"
"`------' `--' `--' `-----'`--' '--'"
"`------' `-----' `-----' `--' '--' \n"
" (`-') _ (`-') "
" (`-') \n"
" <-. (OO ).-/ _ ( OO).-> "
" .-> <-.(OO ) .-> \n"
"(`-')-----./ ,---. \-,-----./ '._"
" (`-')----. ,------,) ,--.' ,-. \n"
"(OO|(_\---'| \ /`.\ | .--./|'--...__)"
"( OO).-. '| /`. '(`-')'.' / \n"
" / | '--. '-'|_.' | /_) (`-')`--. .--'"
"( _) | | || |_.' |(OO \ / \n"
" \_) .--'(| .-. | || |OO ) | | "
" \| |)| || . .' | / /) \n"
" `| |_) | | | |(_' '--'\ | | "
" ' '-' '| |\ \ `-/ /` \n"
" `--' `--' `--' `-----' `--' "
" `-----' `--' '--' `--' \n",
"__________ "
" __ .___ \n"
"\______ \_____ ____ "
"| | __ __| _/____ ___________ \n"
" | | _/\__ \ _/ ___\|"
" |/ // __ |/ _ \ / _ \_ __ \ \n"
" | | \ / __ \\\\ \__"
"_| </ /_/ ( <_> | <_> ) | \/\n"
" |______ /(____ /\___ >"
"__|_ \____ |\____/ \____/|__| \n"
" \/ \/ \/"
" \/ \/ \n"
"___________ "
"__ \n"
"\_ _____/____ _____/"
" |_ ___________ ___.__. \n"
" | __) \__ \ _/ ___\ "
" __\/ _ \_ __ < | | \n"
" | \ / __ \\\\ \__"
"_| | ( <_> ) | \/\___ | \n"
" \___ / (____ /\___ >_"
"_| \____/|__| / ____| \n"
" \/ \/ \/ "
" \/ \n"]
signal.signal(signal.SIGINT, signal_handler)
parser = OptionParser()
parser.add_option("-f", "--file", dest="FILE", action="store",
type="string",
help="File to backdoor")
parser.add_option("-s", "--shell", default="show", dest="SHELL",
action="store", type="string",
help="Payloads that are available for use."
" Use 'show' to see payloads."
)
parser.add_option("-H", "--hostip", default=None, dest="HOST",
action="store", type="string",
help="IP of the C2 for reverse connections.")
parser.add_option("-P", "--port", default=None, dest="PORT",
action="store", type="int",
help="The port to either connect back to for reverse "
"shells or to listen on for bind shells")
parser.add_option("-J", "--cave_jumping", dest="CAVE_JUMPING",
default=False, action="store_true",
help="Select this options if you want to use code cave"
" jumping to further hide your shellcode in the binary."
)
parser.add_option("-a", "--add_new_section", default=False,
dest="ADD_SECTION", action="store_true",
help="Mandating that a new section be added to the "
"exe (better success) but less av avoidance")
parser.add_option("-U", "--user_shellcode", default=None,
dest="SUPPLIED_SHELLCODE", action="store",
help="User supplied shellcode, make sure that it matches"
" the architecture that you are targeting."
)
parser.add_option("-c", "--cave", default=False, dest="FIND_CAVES",
action="store_true",
help="The cave flag will find code caves that "
"can be used for stashing shellcode. "
"This will print to all the code caves "
"of a specific size."
"The -l flag can be use with this setting.")
parser.add_option("-l", "--shell_length", default=380, dest="SHELL_LEN",
action="store", type="int",
help="For use with -c to help find code "
"caves of different sizes")
parser.add_option("-o", "--output-file", default=None, dest="OUTPUT",
action="store", type="string",
help="The backdoor output file")
parser.add_option("-n", "--section", default="sdata", dest="NSECTION",
action="store", type="string",
help="New section name must be "
"less than seven characters")
parser.add_option("-d", "--directory", dest="DIR", action="store",
type="string",
help="This is the location of the files that "
"you want to backdoor. "
"You can make a directory of file backdooring faster by "
"forcing the attaching of a codecave "
"to the exe by using the -a setting.")
parser.add_option("-w", "--change_access", default=True,
dest="CHANGE_ACCESS", action="store_false",
help="This flag changes the section that houses "
"the codecave to RWE. Sometimes this is necessary. "
"Enabled by default. If disabled, the "
"backdoor may fail.")
parser.add_option("-i", "--injector", default=False, dest="INJECTOR",
action="store_true",
help="This command turns the backdoor factory in a "
"hunt and shellcode inject type of mechanism. Edit "
"the target settings in the injector module.")
parser.add_option("-u", "--suffix", default=".old", dest="SUFFIX",
action="store", type="string",
help="For use with injector, places a suffix"
" on the original file for easy recovery")
parser.add_option("-D", "--delete_original", dest="DELETE_ORIGINAL",
default=False, action="store_true",
help="For use with injector module. This command"
" deletes the original file. Not for use in production "
"systems. *Author not responsible for stupid uses.*")
parser.add_option("-O", "--disk_offset", dest="DISK_OFFSET", default=0,
type="int", action="store",
help="Starting point on disk offset, in bytes. "
"Some authors want to obfuscate their on disk offset "
"to avoid reverse engineering, if you find one of those "
"files use this flag, after you find the offset.")
parser.add_option("-S", "--support_check", dest="SUPPORT_CHECK",
default=False, action="store_true",
help="To determine if the file is supported by BDF prior"
" to backdooring the file. For use by itself or with "
"verbose. This check happens automatically if the "
"backdooring is attempted."
)
parser.add_option("-M", "--cave-miner", dest="CAVE_MINER", default=False, action="store_true",
help="Future use, to help determine smallest shellcode possible in a PE file"
)
parser.add_option("-q", "--no_banner", dest="NO_BANNER", default=False, action="store_true",
help="Kills the banner."
)
parser.add_option("-v", "--verbose", default=False, dest="VERBOSE",
action="store_true",
help="For debug information output.")
parser.add_option("-T", "--image-type", dest="IMAGE_TYPE", default="ALL",
type='string',
action="store", help="ALL, x86, or x64 type binaries only. Default=ALL")
parser.add_option("-Z", "--zero_cert", dest="ZERO_CERT", default=True, action="store_false",
help="Allows for the overwriting of the pointer to the PE certificate table"
" effectively removing the certificate from the binary for all intents"
" and purposes."
)
parser.add_option("-R", "--runas_admin", dest="CHECK_ADMIN", default=False, action="store_true",
help="Checks the PE binaries for \'requestedExecutionLevel level=\"highestAvailable\"\'"
". If this string is included in the binary, it must run as system/admin. Doing this "
"slows patching speed significantly."
)
parser.add_option("-L", "--patch_dll", dest="PATCH_DLL", default=True, action="store_false",
help="Use this setting if you DON'T want to patch DLLs. Patches by default."
)
parser.add_option("-F", "--fat_priority", dest="FAT_PRIORITY", default="x64", action="store",
help="For MACH-O format. If fat file, focus on which arch to patch. Default "
"is x64. To force x86 use -F x86, to force both archs use -F ALL."
)
parser.add_option("-B", "--beacon", dest="BEACON", default=15, action="store", type="int",
help="For payloads that have the ability to beacon out, set the time in secs"
)
(options, args) = parser.parse_args()
def basicDiscovery(FILE):
macho_supported = ['\xcf\xfa\xed\xfe', '\xca\xfe\xba\xbe',
'\xce\xfa\xed\xfe',
]
testBinary = open(FILE, 'rb')
header = testBinary.read(4)
testBinary.close()
if 'MZ' in header:
return 'PE'
elif 'ELF' in header:
return 'ELF'
elif header in macho_supported:
return "MACHO"
else:
            print 'Only support ELF, PE, and MACH-O file formats'
return None
if options.NO_BANNER is False:
print choice(menu)
print author
print version
time.sleep(1)
if options.DIR:
for root, subFolders, files in os.walk(options.DIR):
for _file in files:
options.FILE = os.path.join(root, _file)
if os.path.isdir(options.FILE) is True:
print "Directory found, continuing"
continue
is_supported = basicDiscovery(options.FILE)
if is_supported is "PE":
supported_file = pebin(options.FILE,
options.OUTPUT,
options.SHELL,
options.NSECTION,
options.DISK_OFFSET,
options.ADD_SECTION,
options.CAVE_JUMPING,
options.PORT,
options.HOST,
options.SUPPLIED_SHELLCODE,
options.INJECTOR,
options.CHANGE_ACCESS,
options.VERBOSE,
options.SUPPORT_CHECK,
options.SHELL_LEN,
options.FIND_CAVES,
options.SUFFIX,
options.DELETE_ORIGINAL,
options.CAVE_MINER,
options.IMAGE_TYPE,
options.ZERO_CERT,
options.CHECK_ADMIN,
options.PATCH_DLL
)
elif is_supported is "ELF":
supported_file = elfbin(options.FILE,
options.OUTPUT,
options.SHELL,
options.HOST,
options.PORT,
options.SUPPORT_CHECK,
options.FIND_CAVES,
options.SHELL_LEN,
options.SUPPLIED_SHELLCODE,
options.IMAGE_TYPE
)
elif is_supported is "MACHO":
supported_file = machobin(options.FILE,
options.OUTPUT,
options.SHELL,
options.HOST,
options.PORT,
options.SUPPORT_CHECK,
options.SUPPLIED_SHELLCODE,
options.FAT_PRIORITY,
options.BEACON
)
if options.SUPPORT_CHECK is True:
if os.path.isfile(options.FILE):
is_supported = False
print "file", options.FILE
try:
is_supported = supported_file.support_check()
except Exception, e:
is_supported = False
print 'Exception:', str(e), '%s' % options.FILE
if is_supported is False or is_supported is None:
print "%s is not supported." % options.FILE
#continue
else:
print "%s is supported." % options.FILE
# if supported_file.flItms['runas_admin'] is True:
# print "%s must be run as admin." % options.FILE
print "*" * 50
if options.SUPPORT_CHECK is True:
sys.exit()
print ("You are going to backdoor the following "
"items in the %s directory:"
% options.DIR)
dirlisting = os.listdir(options.DIR)
for item in dirlisting:
print " {0}".format(item)
answer = raw_input("Do you want to continue? (yes/no) ")
if 'yes' in answer.lower():
for item in dirlisting:
#print item
print "*" * 50
                options.FILE = options.DIR + '/' + item
if os.path.isdir(options.FILE) is True:
print "Directory found, continuing"
continue
print ("backdooring file %s" % item)
result = None
is_supported = basicDiscovery(options.FILE)
try:
if is_supported is "PE":
supported_file = pebin(options.FILE,
options.OUTPUT,
options.SHELL,
options.NSECTION,
options.DISK_OFFSET,
options.ADD_SECTION,
options.CAVE_JUMPING,
options.PORT,
options.HOST,
options.SUPPLIED_SHELLCODE,
options.INJECTOR,
options.CHANGE_ACCESS,
options.VERBOSE,
options.SUPPORT_CHECK,
options.SHELL_LEN,
options.FIND_CAVES,
options.SUFFIX,
options.DELETE_ORIGINAL,
options.CAVE_MINER,
options.IMAGE_TYPE,
options.ZERO_CERT,
options.CHECK_ADMIN,
options.PATCH_DLL
)
supported_file.OUTPUT = None
supported_file.output_options()
result = supported_file.patch_pe()
elif is_supported is "ELF":
supported_file = elfbin(options.FILE,
options.OUTPUT,
options.SHELL,
options.HOST,
options.PORT,
options.SUPPORT_CHECK,
options.FIND_CAVES,
options.SHELL_LEN,
options.SUPPLIED_SHELLCODE,
options.IMAGE_TYPE
)
supported_file.OUTPUT = None
supported_file.output_options()
result = supported_file.patch_elf()
elif is_supported is "MACHO":
supported_file = machobin(options.FILE,
options.OUTPUT,
options.SHELL,
options.HOST,
options.PORT,
options.SUPPORT_CHECK,
options.SUPPLIED_SHELLCODE,
options.FAT_PRIORITY,
options.BEACON
)
supported_file.OUTPUT = None
supported_file.output_options()
result = supported_file.patch_macho()
if result is None:
print 'Not Supported. Continuing'
continue
else:
print ("[*] File {0} is in backdoored "
"directory".format(supported_file.FILE))
except Exception as e:
print "DIR ERROR", str(e)
else:
print("Goodbye")
sys.exit()
if options.INJECTOR is True:
supported_file = pebin(options.FILE,
options.OUTPUT,
options.SHELL,
options.NSECTION,
options.DISK_OFFSET,
options.ADD_SECTION,
options.CAVE_JUMPING,
options.PORT,
options.HOST,
options.SUPPLIED_SHELLCODE,
options.INJECTOR,
options.CHANGE_ACCESS,
options.VERBOSE,
options.SUPPORT_CHECK,
options.SHELL_LEN,
options.FIND_CAVES,
options.SUFFIX,
options.DELETE_ORIGINAL,
options.IMAGE_TYPE,
options.ZERO_CERT,
options.CHECK_ADMIN,
options.PATCH_DLL
)
supported_file.injector()
sys.exit()
if not options.FILE:
parser.print_help()
sys.exit()
#OUTPUT = output_options(options.FILE, options.OUTPUT)
is_supported = basicDiscovery(options.FILE)
if is_supported is "PE":
supported_file = pebin(options.FILE,
options.OUTPUT,
options.SHELL,
options.NSECTION,
options.DISK_OFFSET,
options.ADD_SECTION,
options.CAVE_JUMPING,
options.PORT,
options.HOST,
options.SUPPLIED_SHELLCODE,
options.INJECTOR,
options.CHANGE_ACCESS,
options.VERBOSE,
options.SUPPORT_CHECK,
options.SHELL_LEN,
options.FIND_CAVES,
options.SUFFIX,
options.DELETE_ORIGINAL,
options.CAVE_MINER,
options.IMAGE_TYPE,
options.ZERO_CERT,
options.CHECK_ADMIN,
options.PATCH_DLL
)
elif is_supported is "ELF":
supported_file = elfbin(options.FILE,
options.OUTPUT,
options.SHELL,
options.HOST,
options.PORT,
options.SUPPORT_CHECK,
options.FIND_CAVES,
options.SHELL_LEN,
options.SUPPLIED_SHELLCODE,
options.IMAGE_TYPE
)
elif is_supported is "MACHO":
supported_file = machobin(options.FILE,
options.OUTPUT,
options.SHELL,
options.HOST,
options.PORT,
options.SUPPORT_CHECK,
options.SUPPLIED_SHELLCODE,
options.FAT_PRIORITY,
options.BEACON
)
else:
print "Not supported."
sys.exit()
result = supported_file.run_this()
if result is True and options.SUPPORT_CHECK is False:
print "File {0} is in the 'backdoored' directory".format(os.path.basename(supported_file.OUTPUT))
#END BDF MAIN
if __name__ == "__main__":
bdfMain()
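
# Illustrative sketch (standalone, not part of the original tool): basicDiscovery() above
# keys the support check off the first four bytes of the target -- 'MZ' for PE, the
# '\x7fELF' magic for ELF, and one of three Mach-O magic values. The helper below restates
# that detection outside the class so it can be exercised on its own.
def sketch_detect_format(path):
    macho_magics = (b'\xcf\xfa\xed\xfe', b'\xca\xfe\xba\xbe', b'\xce\xfa\xed\xfe')
    with open(path, 'rb') as handle:
        header = handle.read(4)
    if header[:2] == b'MZ':
        return 'PE'
    if header[1:4] == b'ELF':
        return 'ELF'
    if header in macho_magics:
        return 'MACHO'
    return None
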
| 49.016393
| 110
| 0.413452
|
dd51ddb1ffafe5dced436431a42e76d79e1f94b4
| 23,632
|
py
|
Python
|
pymatgen/core/tests/test_lattice.py
|
Chessmag/pymatgen
|
61a4bb7a1792e1ea2379abd45b3c40efb816fd64
|
[
"MIT"
] | 1
|
2021-10-18T01:26:50.000Z
|
2021-10-18T01:26:50.000Z
|
pymatgen/core/tests/test_lattice.py
|
Chessmag/pymatgen
|
61a4bb7a1792e1ea2379abd45b3c40efb816fd64
|
[
"MIT"
] | null | null | null |
pymatgen/core/tests/test_lattice.py
|
Chessmag/pymatgen
|
61a4bb7a1792e1ea2379abd45b3c40efb816fd64
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import itertools
from pymatgen.core.lattice import Lattice, get_points_in_spheres
import numpy as np
from pymatgen.util.testing import PymatgenTest
from pymatgen.core.operations import SymmOp
class LatticeTestCase(PymatgenTest):
def setUp(self):
self.lattice = Lattice.cubic(10.0)
self.cubic = self.lattice
self.tetragonal = Lattice.tetragonal(10, 20)
self.orthorhombic = Lattice.orthorhombic(10, 20, 30)
self.monoclinic = Lattice.monoclinic(10, 20, 30, 66)
self.hexagonal = Lattice.hexagonal(10, 20)
self.rhombohedral = Lattice.rhombohedral(10, 77)
family_names = ["cubic", "tetragonal", "orthorhombic", "monoclinic",
"hexagonal", "rhombohedral"]
self.families = {}
for name in family_names:
self.families[name] = getattr(self, name)
def test_format(self):
self.assertEqual("[[10.000, 0.000, 0.000], [0.000, 10.000, 0.000], [0.000, 0.000, 10.000]]",
format(self.lattice, ".3fl"))
self.assertEqual(
"""10.000 0.000 0.000
0.000 10.000 0.000
0.000 0.000 10.000""",
format(self.lattice, ".3f"))
self.assertEqual("{10.0, 10.0, 10.0, 90.0, 90.0, 90.0}",
format(self.lattice, ".1fp"))
def test_init(self):
a = 9.026
lattice = Lattice.cubic(a)
self.assertIsNotNone(lattice, "Initialization from new_cubic failed")
lattice2 = Lattice([[a, 0, 0], [0, a, 0], [0, 0, a]])
for i in range(0, 3):
for j in range(0, 3):
self.assertAlmostEqual(lattice.matrix[i][j],
lattice2.matrix[i][j], 5,
"Inconsistent matrix from two inits!")
def test_copy(self):
cubic_copy = self.cubic.copy()
self.assertTrue(cubic_copy == self.cubic)
self.assertFalse(cubic_copy._matrix is self.cubic._matrix)
def test_get_cartesian_or_frac_coord(self):
coord = self.lattice.get_cartesian_coords([0.15, 0.3, 0.4])
self.assertArrayAlmostEqual(coord, [1.5, 3., 4.])
self.assertArrayAlmostEqual(
self.tetragonal.get_fractional_coords([12.12312, 45.2134,
1.3434]),
[1.212312, 4.52134, 0.06717])
        # Random check that get_cartesian_coords and get_fractional_coords reverse each other.
rand_coord = np.random.random_sample(3)
coord = self.tetragonal.get_cartesian_coords(rand_coord)
fcoord = self.tetragonal.get_fractional_coords(coord)
self.assertArrayAlmostEqual(fcoord, rand_coord)
def test_get_vector_along_lattice_directions(self):
lattice_mat = np.array([[0.5, 0., 0.],
[0.5, np.sqrt(3) / 2., 0.],
[0., 0., 1.0]])
lattice = Lattice(lattice_mat)
cart_coord = np.array([0.5, np.sqrt(3) / 4., 0.5])
latt_coord = np.array([0.25, 0.5, 0.5])
from_direct = lattice.get_fractional_coords(cart_coord) * lattice.lengths
self.assertArrayAlmostEqual(lattice.get_vector_along_lattice_directions(cart_coord), from_direct)
self.assertArrayAlmostEqual(lattice.get_vector_along_lattice_directions(cart_coord), latt_coord)
self.assertArrayEqual(lattice.get_vector_along_lattice_directions(cart_coord).shape, [3, ])
self.assertArrayEqual(lattice.get_vector_along_lattice_directions(cart_coord.reshape([1, 3])).shape, [1, 3])
def test_d_hkl(self):
cubic_copy = self.cubic.copy()
hkl = (1, 2, 3)
dhkl = ((hkl[0] ** 2 + hkl[1] ** 2 + hkl[2] ** 2) / (cubic_copy.a ** 2)) ** (-1 / 2)
self.assertEqual(dhkl, cubic_copy.d_hkl(hkl))
def test_reciprocal_lattice(self):
recip_latt = self.lattice.reciprocal_lattice
self.assertArrayAlmostEqual(recip_latt.matrix,
0.628319 * np.eye(3), 5)
self.assertArrayAlmostEqual(self.tetragonal.reciprocal_lattice.matrix,
[[0.628319, 0., 0.], [0., 0.628319, 0],
[0., 0., 0.3141590]], 5)
# Test the crystallographic version.
recip_latt_xtal = self.lattice.reciprocal_lattice_crystallographic
self.assertArrayAlmostEqual(recip_latt.matrix,
recip_latt_xtal.matrix * 2 * np.pi,
5)
def test_static_methods(self):
lengths_c = [3.840198, 3.84019885, 3.8401976]
angles_c = [119.99998575, 90, 60.00000728]
mat_c = [[3.840198, 0.000000, 0.0000], [1.920099, 3.325710, 0.000000],
[0.000000, -2.217138, 3.135509]]
# should give the lengths and angles above
newlatt = Lattice(mat_c)
lengths = newlatt.lengths
angles = newlatt.angles
for i in range(0, 3):
self.assertAlmostEqual(lengths[i], lengths_c[i], 5,
"Lengths incorrect!")
self.assertAlmostEqual(angles[i], angles_c[i], 5,
"Angles incorrect!")
latt = Lattice.from_parameters(*lengths, *angles)
lengths = latt.lengths
angles = latt.angles
for i in range(0, 3):
self.assertAlmostEqual(lengths[i], lengths_c[i], 5,
"Lengths incorrect!")
self.assertAlmostEqual(angles[i], angles_c[i], 5,
"Angles incorrect!")
def test_attributes(self):
"""docstring for test_attributes"""
lattice = Lattice.cubic(10.0)
self.assertEqual(lattice.a, 10.0)
self.assertEqual(lattice.b, 10.0)
self.assertEqual(lattice.c, 10.0)
self.assertAlmostEqual(lattice.volume, 1000.0)
xyz = lattice.get_cartesian_coords([0.25, 0.35, 0.45])
self.assertEqual(xyz[0], 2.5)
self.assertEqual(xyz[1], 3.5)
self.assertEqual(xyz[2], 4.5)
def test_lattice_matrices(self):
"""
        If alpha == 90 and beta == 90, the two matrices are identical.
"""
def _identical(a, b, c, alpha, beta, gamma):
mat1 = Lattice.from_parameters(a, b, c, alpha, beta, gamma, False).matrix
mat2 = Lattice.from_parameters(a, b, c, alpha, beta, gamma, True).matrix
# self.assertArrayAlmostEqual(mat1, mat2)
return ((mat1 - mat2) ** 2).sum() < 1e-6
self.assertTrue(_identical(2, 3, 4, 90, 90, 90))
self.assertTrue(_identical(2, 3, 4, 90, 90, 80))
self.assertTrue(_identical(2, 3, 4, 90, 90, 100))
self.assertFalse(_identical(2, 3, 4, 100, 90, 90))
self.assertFalse(_identical(2, 3, 4, 90, 100, 90))
self.assertFalse(_identical(2, 3, 4, 100, 100, 100))
def test_get_lll_reduced_lattice(self):
lattice = Lattice([1.0, 1, 1, -1.0, 0, 2, 3.0, 5, 6])
reduced_latt = lattice.get_lll_reduced_lattice()
expected_ans = Lattice(np.array(
[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, -2.0, 0.0, 1.0]).reshape((3, 3)))
self.assertAlmostEqual(
np.linalg.det(np.linalg.solve(expected_ans.matrix,
reduced_latt.matrix)),
1)
self.assertArrayAlmostEqual(
sorted(reduced_latt.abc), sorted(expected_ans.abc))
self.assertAlmostEqual(reduced_latt.volume, lattice.volume)
latt = [7.164750, 2.481942, 0.000000,
- 4.298850, 2.481942, 0.000000,
0.000000, 0.000000, 14.253000]
expected_ans = Lattice(np.array(
[-4.298850, 2.481942, 0.000000, 2.865900, 4.963884, 0.000000,
0.000000, 0.000000, 14.253000]))
reduced_latt = Lattice(latt).get_lll_reduced_lattice()
self.assertAlmostEqual(
np.linalg.det(np.linalg.solve(expected_ans.matrix,
reduced_latt.matrix)),
1)
self.assertArrayAlmostEqual(
sorted(reduced_latt.abc), sorted(expected_ans.abc))
expected_ans = Lattice([0.0, 10.0, 10.0,
10.0, 10.0, 0.0,
30.0, -30.0, 40.0])
lattice = np.array([100., 0., 10., 10., 10., 20., 10., 10., 10.])
lattice = lattice.reshape(3, 3)
lattice = Lattice(lattice.T)
reduced_latt = lattice.get_lll_reduced_lattice()
self.assertAlmostEqual(
np.linalg.det(np.linalg.solve(expected_ans.matrix,
reduced_latt.matrix)),
1)
self.assertArrayAlmostEqual(
sorted(reduced_latt.abc), sorted(expected_ans.abc))
random_latt = Lattice(np.random.random((3, 3)))
if np.linalg.det(random_latt.matrix) > 1e-8:
reduced_random_latt = random_latt.get_lll_reduced_lattice()
self.assertAlmostEqual(reduced_random_latt.volume,
random_latt.volume)
def test_get_niggli_reduced_lattice(self):
latt = Lattice.from_parameters(3, 5.196, 2, 103 + 55 / 60,
109 + 28 / 60,
134 + 53 / 60)
reduced_cell = latt.get_niggli_reduced_lattice()
abc = reduced_cell.lengths
angles = reduced_cell.angles
self.assertAlmostEqual(abc[0], 2, 3)
self.assertAlmostEqual(abc[1], 3, 3)
self.assertAlmostEqual(abc[2], 3, 3)
self.assertAlmostEqual(angles[0], 116.382855225, 3)
self.assertAlmostEqual(angles[1], 94.769790287999996, 3)
self.assertAlmostEqual(angles[2], 109.466666667, 3)
mat = [[5.0, 0, 0], [0, 5.0, 0], [5.0, 0, 5.0]]
latt = Lattice(np.dot([[1, 1, 1], [1, 1, 0], [0, 1, 1]], mat))
reduced_cell = latt.get_niggli_reduced_lattice()
abc = reduced_cell.lengths
angles = reduced_cell.angles
for l in abc:
self.assertAlmostEqual(l, 5, 3)
for a in angles:
self.assertAlmostEqual(a, 90, 3)
latt = Lattice([1.432950, 0.827314, 4.751000, -1.432950, 0.827314,
4.751000, 0.0, -1.654628, 4.751000])
# ans = [[-1.432950, -2.481942, 0.0],
# [-2.8659, 0.0, 0.0],
# [-1.432950, -0.827314, -4.751000]]
ans = [[-1.43295, -2.481942, 0.], [-2.8659, 0., 0.], [-1.43295, -0.827314, -4.751]]
self.assertArrayAlmostEqual(latt.get_niggli_reduced_lattice().matrix,
ans)
latt = Lattice.from_parameters(7.365450, 6.199506, 5.353878,
75.542191, 81.181757, 156.396627)
ans = [[2.578932, 0.826965, 0.000000],
[-0.831059, 2.067413, 1.547813],
[-0.458407, -2.480895, 1.129126]]
self.assertArrayAlmostEqual(latt.get_niggli_reduced_lattice().matrix,
np.array(ans), 5)
def test_find_mapping(self):
m = np.array([[0.1, 0.2, 0.3], [-0.1, 0.2, 0.7], [0.6, 0.9, 0.2]])
latt = Lattice(m)
op = SymmOp.from_origin_axis_angle([0, 0, 0], [2, 3, 3], 35)
rot = op.rotation_matrix
scale = np.array([[1, 1, 0], [0, 1, 0], [0, 0, 1]])
latt2 = Lattice(np.dot(rot, np.dot(scale, m).T).T)
(aligned_out, rot_out, scale_out) = latt2.find_mapping(latt)
self.assertAlmostEqual(abs(np.linalg.det(rot)), 1)
rotated = SymmOp.from_rotation_and_translation(rot_out).operate_multi(latt.matrix)
self.assertArrayAlmostEqual(rotated, aligned_out.matrix)
self.assertArrayAlmostEqual(np.dot(scale_out, latt2.matrix), aligned_out.matrix)
self.assertArrayAlmostEqual(aligned_out.parameters, latt.parameters)
self.assertFalse(np.allclose(aligned_out.parameters, latt2.parameters))
def test_find_all_mappings(self):
m = np.array([[0.1, 0.2, 0.3], [-0.1, 0.2, 0.7], [0.6, 0.9, 0.2]])
latt = Lattice(m)
op = SymmOp.from_origin_axis_angle([0, 0, 0], [2, -1, 3], 40)
rot = op.rotation_matrix
scale = np.array([[0, 2, 0], [1, 1, 0], [0, 0, 1]])
latt2 = Lattice(np.dot(rot, np.dot(scale, m).T).T)
for (aligned_out, rot_out, scale_out) in latt.find_all_mappings(latt2):
self.assertArrayAlmostEqual(np.inner(latt2.matrix, rot_out),
aligned_out.matrix, 5)
self.assertArrayAlmostEqual(np.dot(scale_out, latt.matrix),
aligned_out.matrix)
self.assertArrayAlmostEqual(aligned_out.parameters, latt2.parameters)
self.assertFalse(np.allclose(aligned_out.parameters, latt.parameters))
latt = Lattice.orthorhombic(9, 9, 5)
self.assertEqual(len(list(latt.find_all_mappings(latt))), 16)
# catch the singular matrix error
latt = Lattice.from_parameters(1, 1, 1, 10, 10, 10)
for l, _, _ in latt.find_all_mappings(latt, ltol=0.05, atol=11):
self.assertTrue(isinstance(l, Lattice))
def test_mapping_symmetry(self):
l = Lattice.cubic(1)
l2 = Lattice.orthorhombic(1.1001, 1, 1)
self.assertEqual(l.find_mapping(l2, ltol=0.1), None)
self.assertEqual(l2.find_mapping(l, ltol=0.1), None)
l2 = Lattice.orthorhombic(1.0999, 1, 1)
self.assertNotEqual(l2.find_mapping(l, ltol=0.1), None)
self.assertNotEqual(l.find_mapping(l2, ltol=0.1), None)
def test_to_from_dict(self):
d = self.tetragonal.as_dict()
t = Lattice.from_dict(d)
for i in range(3):
self.assertEqual(t.abc[i], self.tetragonal.abc[i])
self.assertEqual(t.angles[i], self.tetragonal.angles[i])
# Make sure old style dicts work.
d = self.tetragonal.as_dict(verbosity=1)
del d["matrix"]
t = Lattice.from_dict(d)
for i in range(3):
self.assertEqual(t.abc[i], self.tetragonal.abc[i])
self.assertEqual(t.angles[i], self.tetragonal.angles[i])
def test_scale(self):
new_volume = 10
for (family_name, lattice) in self.families.items():
new_lattice = lattice.scale(new_volume)
self.assertAlmostEqual(new_lattice.volume, new_volume)
self.assertArrayAlmostEqual(new_lattice.angles, lattice.angles)
def test_get_wigner_seitz_cell(self):
ws_cell = Lattice([[10, 0, 0], [0, 5, 0], [0, 0, 1]]) \
.get_wigner_seitz_cell()
self.assertEqual(6, len(ws_cell))
for l in ws_cell[3]:
self.assertEqual([abs(i) for i in l], [5.0, 2.5, 0.5])
def test_dot_and_norm(self):
frac_basis = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
for family_name, lattice in self.families.items():
# print(family_name)
self.assertArrayEqual(lattice.norm(lattice.matrix, frac_coords=False), lattice.abc)
self.assertArrayEqual(lattice.norm(frac_basis), lattice.abc)
for (i, vec) in enumerate(frac_basis):
length = lattice.norm(vec)
self.assertArrayEqual(length[0], lattice.abc[i])
# We always get a ndarray.
self.assertTrue(hasattr(length, "shape"))
# Passing complex arrays should raise TypeError
with self.assertRaises(TypeError):
                lattice.norm(np.zeros(3, dtype=complex))
# Cannot reshape the second argument.
with self.assertRaises(ValueError):
lattice.dot(np.zeros(6), np.zeros(8))
# Passing vectors of different length is invalid.
with self.assertRaises(ValueError):
lattice.dot(np.zeros(3), np.zeros(6))
def test_get_points_in_sphere(self):
# This is a non-niggli representation of a cubic lattice
latt = Lattice([[1, 5, 0], [0, 1, 0], [5, 0, 1]])
# evenly spaced points array between 0 and 1
pts = np.array(list(itertools.product(range(5), repeat=3))) / 5
pts = latt.get_fractional_coords(pts)
# Test getting neighbors within 1 neighbor distance of the origin
fcoords, dists, inds, images = latt.get_points_in_sphere(pts, [0, 0, 0], 0.20001,
zip_results=False)
self.assertEqual(len(fcoords), 7) # There are 7 neighbors
self.assertEqual(np.isclose(dists, 0.2).sum(), 6) # 6 are at 0.2
self.assertEqual(np.isclose(dists, 0).sum(), 1) # 1 is at 0
self.assertEqual(len(set(inds)), 7) # They have unique indices
self.assertArrayEqual(images[np.isclose(dists, 0)], [[0, 0, 0]])
# More complicated case, using the zip output
result = latt.get_points_in_sphere(pts, [0.5, 0.5, 0.5], 1.0001)
self.assertEqual(len(result), 552)
self.assertEqual(len(result[0]), 4) # coords, dists, ind, supercell
def test_get_all_distances(self):
fcoords = np.array([[0.3, 0.3, 0.5],
[0.1, 0.1, 0.3],
[0.9, 0.9, 0.8],
[0.1, 0.0, 0.5],
[0.9, 0.7, 0.0]])
lattice = Lattice.from_parameters(8, 8, 4, 90, 76, 58)
expected = np.array([[0.000, 3.015, 4.072, 3.519, 3.245],
[3.015, 0.000, 3.207, 1.131, 4.453],
[4.072, 3.207, 0.000, 2.251, 1.788],
[3.519, 1.131, 2.251, 0.000, 3.852],
[3.245, 4.453, 1.788, 3.852, 0.000]])
output = lattice.get_all_distances(fcoords, fcoords)
self.assertArrayAlmostEqual(output, expected, 3)
# test just one input point
output2 = lattice.get_all_distances(fcoords[0], fcoords)
self.assertArrayAlmostEqual(output2, [expected[0]], 2)
# test distance when initial points are not in unit cell
f1 = [0, 0, 17]
f2 = [0, 0, 10]
self.assertEqual(lattice.get_all_distances(f1, f2)[0, 0], 0)
def test_monoclinic(self):
a, b, c, alpha, beta, gamma = self.monoclinic.parameters
self.assertNotAlmostEqual(beta, 90)
self.assertAlmostEqual(alpha, 90)
self.assertAlmostEqual(gamma, 90)
def test_is_hexagonal(self):
self.assertFalse(self.cubic.is_hexagonal())
self.assertFalse(self.tetragonal.is_hexagonal())
self.assertFalse(self.orthorhombic.is_hexagonal())
self.assertFalse(self.monoclinic.is_hexagonal())
self.assertFalse(self.rhombohedral.is_hexagonal())
self.assertTrue(self.hexagonal.is_hexagonal())
def test_get_distance_and_image(self):
dist, image = self.cubic.get_distance_and_image([0, 0, 0.1],
[0, 0., 0.9])
self.assertAlmostEqual(dist, 2)
self.assertArrayAlmostEqual(image, [0, 0, -1])
# def test_get_distance_and_image_strict(self):
# for count in range(10):
# lengths = [np.random.randint(1, 100) for i in range(3)]
# lattice = [np.random.rand(3) * lengths[i]
# for i in range(3)]
# lattice = Lattice(np.array(lattice))
#
# f1 = np.random.rand(3)
# f2 = np.random.rand(3)
#
# scope = list(range(-3, 4))
# min_image_dist = (float("inf"), None)
# for image in itertools.product(scope, scope, scope):
# cart = lattice.get_cartesian_coords(f1 - (f2 + image))
# dist = np.dot(cart, cart) ** 0.5
# if dist < min_image_dist[0]:
# min_image_dist = (dist, image)
#
# pmg_result = lattice.get_distance_and_image(f1, f2)
# self.assertGreaterEqual(min_image_dist[0] + 1e-7, pmg_result[0])
# if abs(min_image_dist[0] - pmg_result[0]) < 1e-12:
# self.assertArrayAlmostEqual(min_image_dist[1], pmg_result[1])
def test_lll_basis(self):
a = np.array([1., 0.1, 0.])
b = np.array([0., 2., 0.])
c = np.array([0., 0., 3.])
l1 = Lattice([a, b, c])
l2 = Lattice([a + b, b + c, c])
ccoords = np.array([[1, 1, 2], [2, 2, 1.5]])
l1_fcoords = l1.get_fractional_coords(ccoords)
l2_fcoords = l2.get_fractional_coords(ccoords)
self.assertArrayAlmostEqual(l1.matrix, l2.lll_matrix)
self.assertArrayAlmostEqual(np.dot(l2.lll_mapping, l2.matrix),
l1.matrix)
self.assertArrayAlmostEqual(np.dot(l2_fcoords, l2.matrix),
np.dot(l1_fcoords, l1.matrix))
lll_fcoords = l2.get_lll_frac_coords(l2_fcoords)
self.assertArrayAlmostEqual(lll_fcoords, l1_fcoords)
self.assertArrayAlmostEqual(l1.get_cartesian_coords(lll_fcoords),
np.dot(lll_fcoords, l2.lll_matrix))
self.assertArrayAlmostEqual(l2.get_frac_coords_from_lll(lll_fcoords),
l2_fcoords)
def test_get_miller_index_from_sites(self):
# test on a cubic system
m = Lattice.cubic(1)
s1 = np.array([0.5, -1.5, 3])
s2 = np.array([0.5, 3., -1.5])
s3 = np.array([2.5, 1.5, -4.])
self.assertEqual(m.get_miller_index_from_coords([s1, s2, s3]),
(2, 1, 1))
# test on a hexagonal system
m = Lattice([[2.319, -4.01662582, 0.],
[2.319, 4.01662582, 0.],
[0., 0., 7.252]])
s1 = np.array([2.319, 1.33887527, 6.3455])
s2 = np.array([1.1595, 0.66943764, 4.5325])
s3 = np.array([1.1595, 0.66943764, 0.9065])
hkl = m.get_miller_index_from_coords([s1, s2, s3])
self.assertEqual(hkl, (2, -1, 0))
# test for previous failing structure
m = Lattice([10, 0, 0, 0, 10, 0, 0, 0, 10])
sites = [[0.5, 0.8, 0.8], [0.5, 0.4, 0.2], [0.5, 0.3, 0.7]]
hkl = m.get_miller_index_from_coords(sites, coords_are_cartesian=False)
self.assertEqual(hkl, (1, 0, 0))
# test for more than 3 sites
sites = [[0.5, 0.8, 0.8], [0.5, 0.4, 0.2], [0.5, 0.3, 0.7],
[0.5, 0.1, 0.2]]
hkl = m.get_miller_index_from_coords(sites, coords_are_cartesian=False)
self.assertEqual(hkl, (1, 0, 0))
def test_points_in_spheres(self):
points = [[0., 0., 0.], [2., 2., 2.]]
lattice = Lattice.cubic(3)
center_points = [[1.5, 1.5, 1.5]]
nns = get_points_in_spheres(all_coords=np.array(points), center_coords=np.array(center_points), r=3,
pbc=np.array([0, 0, 0], dtype=int), lattice=lattice, numerical_tol=1e-8)
self.assertEqual(len(nns[0]), 2) # two neighbors
nns = get_points_in_spheres(all_coords=np.array(points), center_coords=np.array(center_points), r=3,
pbc=[1, 1, 1],
lattice=lattice, numerical_tol=1e-8, return_fcoords=True)
self.assertEqual(len(nns[0]), 12)
nns = get_points_in_spheres(all_coords=np.array(points), center_coords=np.array(center_points), r=3,
pbc=np.array([True, False, False], dtype=int),
lattice=lattice)
self.assertEqual(len(nns[0]), 4)
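
# Illustrative sketch (assumption about the row-vector convention, kept separate from the
# real Lattice class): the round trip exercised in test_get_cartesian_or_frac_coord is just
# multiplication by the lattice matrix and by its inverse -- cart = frac @ M and
# frac = cart @ inv(M). The numbers below mirror the cubic/tetragonal cases used above.
def _frac_cart_roundtrip_demo():
    matrix = np.array([[10.0, 0.0, 0.0],
                       [0.0, 10.0, 0.0],
                       [0.0, 0.0, 20.0]])  # tetragonal(10, 20)-like cell
    frac = np.array([0.15, 0.3, 0.4])
    cart = frac @ matrix                      # fractional -> cartesian
    frac_back = cart @ np.linalg.inv(matrix)  # cartesian -> fractional
    assert np.allclose(frac, frac_back)
    return cart
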
if __name__ == '__main__':
import unittest
unittest.main()
| 44.927757
| 116
| 0.567112
|
95112fe24f5a62e3491192a8182c801d57769c09
| 1,976
|
py
|
Python
|
Python/tautology.py
|
BuserLukas/Logic
|
cc0447554cfa75b213a10a2db37ce82c42afb91d
|
[
"MIT"
] | 13
|
2019-10-03T13:25:02.000Z
|
2021-12-26T11:49:25.000Z
|
Python/tautology.py
|
BuserLukas/Logic
|
cc0447554cfa75b213a10a2db37ce82c42afb91d
|
[
"MIT"
] | 19
|
2015-01-14T15:36:24.000Z
|
2019-04-21T02:13:23.000Z
|
Python/tautology.py
|
BuserLukas/Logic
|
cc0447554cfa75b213a10a2db37ce82c42afb91d
|
[
"MIT"
] | 18
|
2019-10-03T16:05:46.000Z
|
2021-12-10T19:44:15.000Z
|
import propLogParser as plp
def power(M):
"Compute a list containing all subsets of the set M"
if M == set():
return [ set() ]
x = M.pop()
L = power(M)
return L + [ K | { x } for K in L ]
def tautology(F):
"Check, whether the formula F is a tautology"
P = collectVars(F)
A = power(P)
if { evaluate(F, I) for I in A } == { True }:
return True
else:
return [I for I in A if not evaluate(F, I)][0]
def collectVars(F):
"Collect all propositional variables occurring in the formula F"
if isinstance(F, str):
return { F }
if F[0] == '⊤' or F[0] == '⊥':
return set()
if F[0] == '¬':
return collectVars(F[1])
return collectVars(F[1]) | collectVars(F[2])
def evaluate(F, I):
"Evaluate the propositional formula F using the interpretation I"
if F[0] == '⊤': return True
if F[0] == '⊥': return False
if isinstance(F, str):
return F in I
if F[0] == '¬': return not evaluate(F[1], I)
if F[0] == '∧': return evaluate(F[1], I) and evaluate(F[2], I)
if F[0] == '∨': return evaluate(F[1], I) or evaluate(F[2], I)
if F[0] == '→': return not evaluate(F[1], I) or evaluate(F[2], I)
if F[0] == '↔': return evaluate(F[1], I) == evaluate(F[2], I)
if F[0] == '⊕': return evaluate(F[1], I) != evaluate(F[2], I)
def test(s):
F = plp.LogicParser(s).parse()
    counterExample = tautology(F)
if counterExample == True:
print('The formula', s, 'is a tautology.')
else:
P = collectVars(F)
print('The formula ', s, ' is not a tautology.')
print('Counter example: ')
for x in P:
if x in counterExample:
print(x, "↦ True")
else:
print(x, "↦ False")
if __name__ == "__main__":
test('¬(p ∨ q) ↔ ¬p ∧ ¬q')
test('(p → q) → (¬p → q) → q')
test('(p → q) → (¬p → ¬q)')
test('(p ⊕ q) ↔ ¬(p ↔ q)')
test('¬p ↔ (p → ⊥)')
| 30.4
| 69
| 0.510628
|
dcbd9ca692faf70983f93b43fbb00b71499619c7
| 6,869
|
py
|
Python
|
custom/icds_reports/reports/stadiometer.py
|
rochakchauhan/commcare-hq
|
aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236
|
[
"BSD-3-Clause"
] | null | null | null |
custom/icds_reports/reports/stadiometer.py
|
rochakchauhan/commcare-hq
|
aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236
|
[
"BSD-3-Clause"
] | 1
|
2021-06-02T04:45:16.000Z
|
2021-06-02T04:45:16.000Z
|
custom/icds_reports/reports/stadiometer.py
|
rochakchauhan/commcare-hq
|
aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236
|
[
"BSD-3-Clause"
] | null | null | null |
from collections import OrderedDict, defaultdict
from datetime import datetime
from dateutil.relativedelta import relativedelta
from dateutil.rrule import MONTHLY, rrule
from django.db.models.aggregates import Sum
from custom.icds_reports.cache import icds_quickcache
from custom.icds_reports.const import LocationTypes, ChartColors, MapColors
from custom.icds_reports.messages import awcs_reported_stadiometer_text
from custom.icds_reports.models import AggAwcMonthly
from custom.icds_reports.utils import apply_exclude, generate_data_for_map, indian_formatted_number
@icds_quickcache(['domain', 'config', 'loc_level', 'show_test'], timeout=30 * 60)
def get_stadiometer_data_map(domain, config, loc_level, show_test=False):
def get_data_for(filters):
filters['month'] = datetime(*filters['month'])
queryset = AggAwcMonthly.objects.filter(
**filters
).values(
'%s_name' % loc_level, '%s_map_location_name' % loc_level
).annotate(
in_month=Sum('stadiometer'),
all=Sum('num_awc_infra_last_update'),
).order_by('%s_name' % loc_level, '%s_map_location_name' % loc_level)
if not show_test:
queryset = apply_exclude(domain, queryset)
return queryset
data_for_map, valid_total, in_month_total, average, total = generate_data_for_map(
get_data_for(config),
loc_level,
'in_month',
'all',
25,
75
)
fills = OrderedDict()
fills.update({'0%-25%': MapColors.RED})
fills.update({'25%-75%': MapColors.ORANGE})
fills.update({'75%-100%': MapColors.PINK})
fills.update({'defaultFill': MapColors.GREY})
return {
"slug": "stadiometer",
"label": "Percentage of AWCs that reported having a Stadiometer",
"fills": fills,
"rightLegend": {
"average": average,
"info": awcs_reported_stadiometer_text(),
"extended_info": [
{
'indicator': (
'Total number of AWCs with a Stadiometer:'
),
'value': indian_formatted_number(in_month_total)
},
{
'indicator': (
'% of AWCs with a Stadiometer:'
),
'value': '%.2f%%' % (in_month_total * 100 / float(valid_total or 1))
}
]
},
"data": dict(data_for_map),
}
@icds_quickcache(['domain', 'config', 'loc_level', 'show_test'], timeout=30 * 60)
def get_stadiometer_data_chart(domain, config, loc_level, show_test=False):
month = datetime(*config['month'])
three_before = datetime(*config['month']) - relativedelta(months=3)
config['month__range'] = (three_before, month)
del config['month']
chart_data = AggAwcMonthly.objects.filter(
**config
).values(
'month', '%s_name' % loc_level
).annotate(
in_month=Sum('stadiometer'),
all=Sum('num_awc_infra_last_update'),
).order_by('month')
if not show_test:
chart_data = apply_exclude(domain, chart_data)
data = {
'blue': OrderedDict(),
}
dates = [dt for dt in rrule(MONTHLY, dtstart=three_before, until=month)]
for date in dates:
miliseconds = int(date.strftime("%s")) * 1000
data['blue'][miliseconds] = {'y': 0, 'all': 0, 'in_month': 0}
best_worst = defaultdict(lambda: {
'in_month': 0,
'all': 0
})
for row in chart_data:
date = row['month']
in_month = (row['in_month'] or 0)
location = row['%s_name' % loc_level]
valid = row['all']
best_worst[location]['in_month'] = in_month
best_worst[location]['all'] = (valid or 0)
date_in_miliseconds = int(date.strftime("%s")) * 1000
data['blue'][date_in_miliseconds]['all'] += (valid or 0)
data['blue'][date_in_miliseconds]['in_month'] += in_month
all_locations = [
{
'loc_name': key,
'percent': (value['in_month'] * 100) / float(value['all'] or 1)
}
for key, value in best_worst.items()
]
all_locations_sorted_by_name = sorted(all_locations, key=lambda x: x['loc_name'])
all_locations_sorted_by_percent_and_name = sorted(
all_locations_sorted_by_name, key=lambda x: x['percent'], reverse=True)
return {
"chart_data": [
{
"values": [
{
'x': key,
'y': value['in_month'] / float(value['all'] or 1),
'in_month': value['in_month']
} for key, value in data['blue'].items()
],
"key": "Percentage of AWCs that reported having a Stadiometer",
"strokeWidth": 2,
"classed": "dashed",
"color": ChartColors.BLUE
}
],
"all_locations": all_locations_sorted_by_percent_and_name,
"top_five": all_locations_sorted_by_percent_and_name[:5],
"bottom_five": all_locations_sorted_by_percent_and_name[-5:],
"location_type": loc_level.title() if loc_level != LocationTypes.SUPERVISOR else 'Sector'
}
@icds_quickcache(['domain', 'config', 'loc_level', 'location_id', 'show_test'], timeout=30 * 60)
def get_stadiometer_sector_data(domain, config, loc_level, location_id, show_test=False):
group_by = ['%s_name' % loc_level]
config['month'] = datetime(*config['month'])
data = AggAwcMonthly.objects.filter(
**config
).values(
*group_by
).annotate(
in_month=Sum('stadiometer'),
all=Sum('num_awc_infra_last_update')
).order_by('%s_name' % loc_level)
if not show_test:
data = apply_exclude(domain, data)
chart_data = {
'blue': [],
}
tooltips_data = defaultdict(lambda: {
'in_month': 0,
'all': 0
})
for row in data:
valid = row['all']
name = row['%s_name' % loc_level]
in_month = row['in_month']
row_values = {
'in_month': in_month or 0,
'all': valid or 0
}
for prop, value in row_values.items():
tooltips_data[name][prop] += value
value = (in_month or 0) / float(valid or 1)
chart_data['blue'].append([
name, value
])
chart_data['blue'] = sorted(chart_data['blue'])
return {
"tooltips_data": dict(tooltips_data),
"info": awcs_reported_stadiometer_text(),
"chart_data": [
{
"values": chart_data['blue'],
"key": "",
"strokeWidth": 2,
"classed": "dashed",
"color": MapColors.BLUE
},
]
}
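
# Illustrative sketch (standalone, made-up sample rows): each endpoint above reduces the
# aggregated queryset to two counters per location -- AWCs that reported owning a
# stadiometer ("in_month") and AWCs that reported infrastructure data at all ("all") --
# and then ranks locations by in_month / all, guarding the denominator with ``or 1``.
# The function below shows only that reduction, without Django.
def _rank_locations_sketch():
    rows = [
        {'name': 'Block A', 'in_month': 30, 'all': 40},
        {'name': 'Block B', 'in_month': 5, 'all': 0},   # no infra update reported yet
        {'name': 'Block C', 'in_month': 12, 'all': 60},
    ]
    ranked = [
        {'loc_name': row['name'],
         'percent': (row['in_month'] * 100) / float(row['all'] or 1)}
        for row in rows
    ]
    return sorted(ranked, key=lambda x: x['percent'], reverse=True)
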
| 31.800926
| 99
| 0.572136
|
44da136be9a9ba52d1f0da2cb3745ecc39c2ddb3
| 10,840
|
py
|
Python
|
var/spack/repos/builtin/packages/r/package.py
|
carlabguillen/spack
|
7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1
|
2021-07-03T22:53:51.000Z
|
2021-07-03T22:53:51.000Z
|
var/spack/repos/builtin/packages/r/package.py
|
carlabguillen/spack
|
7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 3
|
2022-03-09T09:15:39.000Z
|
2022-03-09T09:15:42.000Z
|
var/spack/repos/builtin/packages/r/package.py
|
carlabguillen/spack
|
7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2
|
2020-01-10T18:54:54.000Z
|
2021-07-03T22:57:16.000Z
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
from spack import *
class R(AutotoolsPackage):
"""R is 'GNU S', a freely available language and environment for
statistical computing and graphics which provides a wide variety of
statistical and graphical techniques: linear and nonlinear modelling,
statistical tests, time series analysis, classification, clustering, etc.
Please consult the R project homepage for further information."""
homepage = "https://www.r-project.org"
url = "https://cloud.r-project.org/src/base/R-3/R-3.4.3.tar.gz"
extendable = True
version('4.0.2', sha256='d3bceab364da0876625e4097808b42512395fdf41292f4915ab1fd257c1bbe75')
version('4.0.1', sha256='95fe24a4d8d8f8f888460c8f5fe4311cec656e7a1722d233218bc03861bc6f32')
version('4.0.0', sha256='06beb0291b569978484eb0dcb5d2339665ec745737bdfb4e873e7a5a75492940')
version('3.6.3', sha256='89302990d8e8add536e12125ec591d6951022cf8475861b3690bc8bf1cefaa8f')
version('3.6.2', sha256='bd65a45cddfb88f37370fbcee4ac8dd3f1aebeebe47c2f968fd9770ba2bbc954')
version('3.6.1', sha256='5baa9ebd3e71acecdcc3da31d9042fb174d55a42829f8315f2457080978b1389')
version('3.6.0', sha256='36fcac3e452666158e62459c6fc810adc247c7109ed71c5b6c3ad5fc2bf57509')
version('3.5.3', sha256='2bfa37b7bd709f003d6b8a172ddfb6d03ddd2d672d6096439523039f7a8e678c')
version('3.5.2', sha256='e53d8c3cf20f2b8d7a9c1631b6f6a22874506fb392034758b3bb341c586c5b62')
version('3.5.1', sha256='0463bff5eea0f3d93fa071f79c18d0993878fd4f2e18ae6cf22c1639d11457ed')
version('3.5.0', sha256='fd1725535e21797d3d9fea8963d99be0ba4c3aecadcf081b43e261458b416870')
version('3.4.4', sha256='b3e97d2fab7256d1c655c4075934725ba1cd7cb9237240a11bb22ccdad960337')
version('3.4.3', sha256='7a3cb831de5b4151e1f890113ed207527b7d4b16df9ec6b35e0964170007f426')
version('3.4.2', sha256='971e30c2436cf645f58552905105d75788bd9733bddbcb7c4fbff4c1a6d80c64')
version('3.4.1', sha256='02b1135d15ea969a3582caeb95594a05e830a6debcdb5b85ed2d5836a6a3fc78')
version('3.4.0', sha256='288e9ed42457c47720780433b3d5c3c20983048b789291cc6a7baa11f9428b91')
version('3.3.3', sha256='5ab768053a275084618fb669b4fbaadcc39158998a87e8465323829590bcfc6c')
version('3.3.2', sha256='d294ad21e9f574fb4828ebb3a94b8cb34f4f304a41687a994be00dd41a4e514c')
version('3.3.1', sha256='3dc59ae5831f5380f83c169bac2103ad052efe0ecec4ffa74bde4d85a0fda9e2')
version('3.3.0', sha256='9256b154b1a5993d844bee7b1955cd49c99ad72cef03cce3cd1bdca1310311e4')
version('3.2.5', sha256='60745672dce5ddc201806fa59f6d4e0ba6554d8ed78d0f9f0d79a629978f80b5')
version('3.2.3', sha256='b93b7d878138279234160f007cb9b7f81b8a72c012a15566e9ec5395cfd9b6c1')
version('3.2.2', sha256='9c9152e74134b68b0f3a1c7083764adc1cb56fd8336bec003fd0ca550cd2461d')
version('3.2.1', sha256='d59dbc3f04f4604a5cf0fb210b8ea703ef2438b3ee65fd5ab536ec5234f4c982')
version('3.2.0', sha256='f5ae953f18ba6f3d55b46556bbbf73441350f9fd22625402b723a2b81ff64f35')
version('3.1.3', sha256='07e98323935baa38079204bfb9414a029704bb9c0ca5ab317020ae521a377312')
version('3.1.2', sha256='bcd150afcae0e02f6efb5f35a6ab72432be82e849ec52ce0bb89d8c342a8fa7a')
variant('external-lapack', default=False,
description='Links to externally installed BLAS/LAPACK')
variant('X', default=False,
description='Enable X11 support (call configure --with-x)')
variant('memory_profiling', default=False,
description='Enable memory profiling')
variant('rmath', default=False,
description='Build standalone Rmath library')
# Virtual dependencies
depends_on('blas', when='+external-lapack')
depends_on('lapack', when='+external-lapack')
# Concrete dependencies.
depends_on('readline')
depends_on('ncurses')
depends_on('icu4c')
depends_on('glib')
depends_on('zlib@1.2.5:')
depends_on('bzip2')
depends_on('libtiff')
depends_on('jpeg')
depends_on('cairo+pdf')
depends_on('cairo+X', when='+X')
depends_on('cairo~X', when='~X')
depends_on('pango')
depends_on('pango+X', when='+X')
depends_on('pango~X', when='~X')
depends_on('freetype')
depends_on('tcl')
depends_on('tk', when='+X')
depends_on('libx11', when='+X')
depends_on('libxt', when='+X')
depends_on('libxmu', when='+X')
depends_on('curl')
depends_on('pcre2', when='@4:')
depends_on('pcre', when='@:3.6.3')
depends_on('java')
patch('zlib.patch', when='@:3.3.2')
    # R cannot currently be built with '-O2' optimization
    # when using the Fujitsu compiler @4.1.0.
    # Until the Fujitsu compiler resolves this problem, this
    # temporary patch lowers the optimization level.
patch('change_optflags_tmp.patch', when='%fj@4.1.0')
# R custom URL version
def url_for_version(self, version):
"""Handle R's customed URL versions"""
url = 'https://cloud.r-project.org/src/base'
return url + '/R-%s/R-%s.tar.gz' % (version.up_to(1), version)
filter_compiler_wrappers(
'Makeconf', relative_root=os.path.join('rlib', 'R', 'etc')
)
@property
def etcdir(self):
return join_path(prefix, 'rlib', 'R', 'etc')
@run_after('build')
def build_rmath(self):
if '+rmath' in self.spec:
with working_dir('src/nmath/standalone'):
make()
@run_after('install')
def install_rmath(self):
if '+rmath' in self.spec:
with working_dir('src/nmath/standalone'):
make('install')
def configure_args(self):
spec = self.spec
prefix = self.prefix
tcl_config_path = join_path(spec['tcl'].prefix.lib, 'tclConfig.sh')
if not os.path.exists(tcl_config_path):
tcl_config_path = join_path(spec['tcl'].prefix,
'lib64', 'tclConfig.sh')
config_args = [
'--libdir={0}'.format(join_path(prefix, 'rlib')),
'--enable-R-shlib',
'--enable-BLAS-shlib',
'--enable-R-framework=no',
'--without-recommended-packages',
'--with-tcl-config={0}'.format(tcl_config_path),
'LDFLAGS=-L{0} -Wl,-rpath,{0}'.format(join_path(prefix, 'rlib',
'R', 'lib')),
]
if '^tk' in spec:
tk_config_path = join_path(spec['tk'].prefix.lib, 'tkConfig.sh')
if not os.path.exists(tk_config_path):
tk_config_path = join_path(spec['tk'].prefix,
'lib64', 'tkConfig.sh')
config_args.append('--with-tk-config={0}'.format(tk_config_path))
if '+external-lapack' in spec:
if '^mkl' in spec and 'gfortran' in self.compiler.fc:
mkl_re = re.compile(r'(mkl_)intel(_i?lp64\b)')
config_args.extend([
mkl_re.sub(r'\g<1>gf\g<2>',
'--with-blas={0}'.format(
spec['blas'].libs.ld_flags)),
'--with-lapack'
])
else:
config_args.extend([
'--with-blas={0}'.format(spec['blas'].libs.ld_flags),
'--with-lapack'
])
if '+X' in spec:
config_args.append('--with-x')
else:
config_args.append('--without-x')
if '+memory_profiling' in spec:
config_args.append('--enable-memory-profiling')
# Set FPICFLAGS for compilers except 'gcc'.
if self.compiler.name != 'gcc':
config_args.append('FPICFLAGS={0}'.format(
self.compiler.cc_pic_flag))
return config_args
@run_after('install')
def copy_makeconf(self):
# Make a copy of Makeconf because it will be needed to properly build R
# dependencies in Spack.
src_makeconf = join_path(self.etcdir, 'Makeconf')
dst_makeconf = join_path(self.etcdir, 'Makeconf.spack')
install(src_makeconf, dst_makeconf)
# ========================================================================
# Set up environment to make install easy for R extensions.
# ========================================================================
@property
def r_lib_dir(self):
return join_path('rlib', 'R', 'library')
def setup_dependent_build_environment(self, env, dependent_spec):
# Set R_LIBS to include the library dir for the
# extension and any other R extensions it depends on.
r_libs_path = []
for d in dependent_spec.traverse(
deptype=('build', 'run'), deptype_query='run'):
if d.package.extends(self.spec):
r_libs_path.append(join_path(d.prefix, self.r_lib_dir))
r_libs_path = ':'.join(r_libs_path)
env.set('R_LIBS', r_libs_path)
env.set('R_MAKEVARS_SITE',
join_path(self.etcdir, 'Makeconf.spack'))
# Use the number of make_jobs set in spack. The make program will
# determine how many jobs can actually be started.
env.set('MAKEFLAGS', '-j{0}'.format(make_jobs))
def setup_dependent_run_environment(self, env, dependent_spec):
# For run time environment set only the path for dependent_spec and
# prepend it to R_LIBS
if dependent_spec.package.extends(self.spec):
env.prepend_path('R_LIBS', join_path(
dependent_spec.prefix, self.r_lib_dir))
def setup_run_environment(self, env):
env.prepend_path('LIBRARY_PATH',
join_path(self.prefix, 'rlib', 'R', 'lib'))
env.prepend_path('LD_LIBRARY_PATH',
join_path(self.prefix, 'rlib', 'R', 'lib'))
env.prepend_path('CPATH',
join_path(self.prefix, 'rlib', 'R', 'include'))
def setup_dependent_package(self, module, dependent_spec):
"""Called before R modules' install() methods. In most cases,
extensions will only need to have one line:
R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
self.stage.source_path)"""
# R extension builds can have a global R executable function
module.R = Executable(join_path(self.spec.prefix.bin, 'R'))
        # Add variable for the library directory
module.r_lib_dir = join_path(dependent_spec.prefix, self.r_lib_dir)
# Make the site packages directory for extensions, if it does not exist
# already.
if dependent_spec.package.is_extension:
mkdirp(module.r_lib_dir)
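
# Illustrative sketch (standalone, not used by Spack): url_for_version() above reproduces
# CRAN's directory layout of .../src/base/R-<major>/R-<full version>.tar.gz. The helper
# below restates that mapping with a plain string so the scheme is easy to check, e.g.
# _cran_source_url('3.4.3') -> 'https://cloud.r-project.org/src/base/R-3/R-3.4.3.tar.gz'.
def _cran_source_url(version_string):
    major = version_string.split('.')[0]
    return 'https://cloud.r-project.org/src/base/R-{0}/R-{1}.tar.gz'.format(
        major, version_string)
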
| 44.065041
| 95
| 0.642066
|
4f0dcb16f634f353e6d50ca952156424418dbe91
| 6,257
|
py
|
Python
|
sdk/python/pulumi_azure/dns/ptr_record.py
|
apollo2030/pulumi-azure
|
034665c61665f4dc7e291b8813747012d34fa044
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/dns/ptr_record.py
|
apollo2030/pulumi-azure
|
034665c61665f4dc7e291b8813747012d34fa044
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/dns/ptr_record.py
|
apollo2030/pulumi-azure
|
034665c61665f4dc7e291b8813747012d34fa044
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class PtrRecord(pulumi.CustomResource):
fqdn: pulumi.Output[str]
"""
The FQDN of the DNS PTR Record.
"""
name: pulumi.Output[str]
"""
The name of the DNS PTR Record.
"""
records: pulumi.Output[list]
"""
List of Fully Qualified Domain Names.
"""
resource_group_name: pulumi.Output[str]
"""
Specifies the resource group where the resource exists. Changing this forces a new resource to be created.
"""
tags: pulumi.Output[dict]
"""
A mapping of tags to assign to the resource.
"""
ttl: pulumi.Output[float]
"""
The Time To Live (TTL) of the DNS record in seconds.
"""
zone_name: pulumi.Output[str]
"""
    Specifies the DNS Zone (parent resource) in which the DNS PTR Record exists. Changing this forces a new resource to be created.
"""
def __init__(__self__, resource_name, opts=None, name=None, records=None, resource_group_name=None, tags=None, ttl=None, zone_name=None, __props__=None, __name__=None, __opts__=None):
"""
Enables you to manage DNS PTR Records within Azure DNS.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: The name of the DNS PTR Record.
:param pulumi.Input[list] records: List of Fully Qualified Domain Names.
:param pulumi.Input[str] resource_group_name: Specifies the resource group where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[float] ttl: The Time To Live (TTL) of the DNS record in seconds.
        :param pulumi.Input[str] zone_name: Specifies the DNS Zone (parent resource) in which the DNS PTR Record exists. Changing this forces a new resource to be created.
> This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/dns_ptr_record.html.markdown.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['name'] = name
if records is None:
raise TypeError("Missing required property 'records'")
__props__['records'] = records
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['tags'] = tags
if ttl is None:
raise TypeError("Missing required property 'ttl'")
__props__['ttl'] = ttl
if zone_name is None:
raise TypeError("Missing required property 'zone_name'")
__props__['zone_name'] = zone_name
__props__['fqdn'] = None
super(PtrRecord, __self__).__init__(
'azure:dns/ptrRecord:PtrRecord',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, fqdn=None, name=None, records=None, resource_group_name=None, tags=None, ttl=None, zone_name=None):
"""
Get an existing PtrRecord resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] fqdn: The FQDN of the DNS PTR Record.
:param pulumi.Input[str] name: The name of the DNS PTR Record.
:param pulumi.Input[list] records: List of Fully Qualified Domain Names.
:param pulumi.Input[str] resource_group_name: Specifies the resource group where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[float] ttl: The Time To Live (TTL) of the DNS record in seconds.
:param pulumi.Input[str] zone_name: Specifies the DNS Zone where the DNS Zone (parent resource) exists. Changing this forces a new resource to be created.
> This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/dns_ptr_record.html.markdown.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["fqdn"] = fqdn
__props__["name"] = name
__props__["records"] = records
__props__["resource_group_name"] = resource_group_name
__props__["tags"] = tags
__props__["ttl"] = ttl
__props__["zone_name"] = zone_name
return PtrRecord(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 48.130769
| 187
| 0.668531
|
fa970328743c5b58e3b947d5d5c2776ce8024ab8
| 3,741
|
py
|
Python
|
pysmartnode/components/machine/remoteConfig.py
|
Nabla128k/pysmartnode
|
a0998ad6582a28fe5a0529fb15dd4f61e254d25f
|
[
"MIT"
] | 82
|
2018-07-24T18:30:41.000Z
|
2022-03-10T06:13:15.000Z
|
pysmartnode/components/machine/remoteConfig.py
|
Nabla128k/pysmartnode
|
a0998ad6582a28fe5a0529fb15dd4f61e254d25f
|
[
"MIT"
] | 15
|
2020-02-08T21:20:09.000Z
|
2021-03-15T09:13:28.000Z
|
pysmartnode/components/machine/remoteConfig.py
|
Nabla128k/pysmartnode
|
a0998ad6582a28fe5a0529fb15dd4f61e254d25f
|
[
"MIT"
] | 23
|
2018-07-24T18:30:42.000Z
|
2021-12-12T10:03:44.000Z
|
# Author: Kevin Köck
# Copyright Kevin Köck 2019-2020 Released under the MIT license
# Created on 2019-09-15
__updated__ = "2020-08-11"
__version__ = "0.93"
from pysmartnode.utils.component import ComponentBase
from pysmartnode import config
from pysmartnode import logging
import uasyncio as asyncio
from sys import platform
import gc
import os
COMPONENT_NAME = "remoteConfig"
_mqtt = config.getMQTT()
_log = logging.getLogger(COMPONENT_NAME)
# SPIRAM is very slow when importing modules
WAIT = 1.5 if platform == "esp8266" else (
0.5 if os.uname() == "posix" or "(spiram)" not in os.uname().machine else 3)
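# Rough message flow implemented by RemoteConfig below (derived from _watcher and
# on_message; shown only for orientation, no behaviour is added here):
#   1. the device publishes [config.VERSION, platform, WAIT] to
#      <mqtt_home>/login/<client_id>/set
#   2. the configuration host replies on <mqtt_home>/login/<client_id> with the
#      number of components to expect
#   3. each component then arrives as a dict on <mqtt_home>/login/<client_id>/<name>
#      and is registered through config.registerComponent()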
class RemoteConfig(ComponentBase):
def __init__(self, **kwargs):
super().__init__(COMPONENT_NAME, __version__, unit_index=0, logger=_log, **kwargs)
self._topic = "{!s}/login/{!s}/#".format(_mqtt.mqtt_home, _mqtt.client_id)
self._icomp = None
self._rcomp = []
self._done = False
self._watcher_task = asyncio.create_task(self._watcher())
def done(self):
return self._done
async def _watcher(self):
mqtt = _mqtt
mqtt.subscribeSync(self._topic, self.on_message, self)
try:
while True:
while mqtt.isconnected() is False:
await asyncio.sleep(1)
if await mqtt.awaitSubscriptionsDone(await_connection=False):
_log.debug("waiting for config", local_only=True)
await _mqtt.publish(
"{!s}/login/{!s}/set".format(mqtt.mqtt_home, mqtt.client_id),
[config.VERSION, platform, WAIT])
gc.collect()
else:
await asyncio.sleep(20)
continue
for _ in range(120):
if mqtt.isconnected() is False:
break
await asyncio.sleep(1)
# so that it can be cancelled properly every second
except asyncio.CancelledError:
if config.DEBUG is True:
_log.debug("_watcher cancelled", local_only=True)
except Exception as e:
await _log.asyncLog("error", "Error watching remoteConfig:", e)
finally:
await mqtt.unsubscribe(self._topic, self)
self._done = True
def _saveComponent(self, name, data):
pass
# save if save is enabled
async def on_message(self, topic, msg, retain):
if retain is True:
return False
m = memoryview(topic)
if m[-4:] == b"/set":
return False
if m == memoryview(self._topic)[:-2]:
print("received amount", msg)
self._icomp = int(msg)
# no return so it can end if 0 components are expected
elif self._icomp is None:
await _log.asyncLog("error", "Need amount of components first")
return False
else:
if type(msg) != dict:
await _log.asyncLog("error", "Received config is no dict")
return False
name = topic[topic.rfind("/") + 1:]
del topic
gc.collect()
_log.info("received config for component", name, ":", msg, local_only=True)
if name in self._rcomp:
# received config already, typically happens if process was
# interrupted by network error
return False
self._rcomp.append(name)
self._saveComponent(name, msg)
await config.registerComponent(name, msg)
if len(self._rcomp) == self._icomp: # received all components
self._watcher_task.cancel()
return False
| 36.320388
| 90
| 0.57792
|
cf4545c0d96ad70a56077b248b4f7efbbcd3825c
| 441
|
py
|
Python
|
3.6/Modules/global_parameters.py
|
mew-two-github/de-Novo-drug-Design
|
c21ebf1067d10f4a1243c767adec3dd7f5af7d78
|
[
"MIT"
] | 1
|
2021-12-27T10:23:38.000Z
|
2021-12-27T10:23:38.000Z
|
3.6/Modules/global_parameters.py
|
mew-two-github/de-Novo-drug-Design
|
c21ebf1067d10f4a1243c767adec3dd7f5af7d78
|
[
"MIT"
] | null | null | null |
3.6/Modules/global_parameters.py
|
mew-two-github/de-Novo-drug-Design
|
c21ebf1067d10f4a1243c767adec3dd7f5af7d78
|
[
"MIT"
] | null | null | null |
# GLOBAL PARAMETERS
# Fragmenting and building the encoding
MOL_SPLIT_START = 70
MAX_ATOMS = 12
MAX_FREE = 3
MAX_FRAGMENTS = 12
# Similarity parameters
ETA = 0.1
# Generation parameters
MAX_SWAP = 5
FEATURES = 2
# Model parameters
N_DENSE = 128
N_DENSE2 = 32
N_LSTM = 32 # Times 2 neurons, since there are both a forward and a backward pass in the bidirectional LSTM
# RL training
GAMMA = 0.95
BATCH_SIZE = 512
EPOCHS = 300
TIMES = 8
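# Sketch of how the constants above are typically consumed (assumed Keras-style
# layers; the actual model definition lives in another module):
#   Bidirectional(LSTM(N_LSTM))  -> 2 * N_LSTM output units (forward + backward)
#   followed by Dense(N_DENSE) and Dense(N_DENSE2) layers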
| 15.75
| 107
| 0.746032
|
b0cfe5849cb0ceb088cd96cb6335412d5f14f44c
| 993
|
py
|
Python
|
seeq/addons/correlation/_utils/__init__.py
|
seeq12/seeq-correlation
|
ab2ed13871352dc3671f5d5df09ec3aebd5f24f5
|
[
"Apache-2.0"
] | 2
|
2021-11-17T00:17:43.000Z
|
2022-01-25T21:15:04.000Z
|
seeq/addons/correlation/_utils/__init__.py
|
seeq12/seeq-correlation
|
ab2ed13871352dc3671f5d5df09ec3aebd5f24f5
|
[
"Apache-2.0"
] | 5
|
2021-11-02T23:13:57.000Z
|
2022-02-14T21:30:32.000Z
|
seeq/addons/correlation/_utils/__init__.py
|
seeq12/seeq-correlation
|
ab2ed13871352dc3671f5d5df09ec3aebd5f24f5
|
[
"Apache-2.0"
] | null | null | null |
from ._common import validate_argument_types, print_red
from ._permissions import permissions_defaults, add_datalab_project_ace, get_user, get_user_group
from ._sdl import pull_only_signals, get_worksheet_url, get_workbook_worksheet_workstep_ids, get_worksheet_url_from_ids
from ._sdl import sanitize_sdl_url, get_datalab_project_id, check_spy_version, addon_tool_management
from ._seeq_new_content import create_condition, create_workstep_signals
_cache_max_items = 128
_user_guide = 'https://seeq12.github.io/seeq-correlation/user_guide.html'
__all__ = ['validate_argument_types', 'print_red', 'create_condition', 'create_workstep_signals',
'permissions_defaults', 'add_datalab_project_ace', 'get_user', 'get_user_group',
'pull_only_signals', 'get_worksheet_url', 'get_workbook_worksheet_workstep_ids',
'get_worksheet_url_from_ids', 'sanitize_sdl_url', 'get_datalab_project_id',
'addon_tool_management', 'check_spy_version', '_cache_max_items']
| 66.2
| 119
| 0.814703
|
d054894ae9950e2482b6a80ef7f7f5d9fbedccd7
| 264
|
py
|
Python
|
passbook/factors/captcha/apps.py
|
fossabot/passbook
|
cba17f6659404445ac3025f11657d89368cc8b4f
|
[
"MIT"
] | null | null | null |
passbook/factors/captcha/apps.py
|
fossabot/passbook
|
cba17f6659404445ac3025f11657d89368cc8b4f
|
[
"MIT"
] | null | null | null |
passbook/factors/captcha/apps.py
|
fossabot/passbook
|
cba17f6659404445ac3025f11657d89368cc8b4f
|
[
"MIT"
] | null | null | null |
"""passbook captcha app"""
from django.apps import AppConfig
class PassbookFactorCaptchaConfig(AppConfig):
"""passbook captcha app"""
name = "passbook.factors.captcha"
label = "passbook_factors_captcha"
verbose_name = "passbook Factors.Captcha"
| 24
| 45
| 0.738636
|
0967d1cf070d9ccf2bad3f3b947bba4cf3784ded
| 91
|
py
|
Python
|
Diena_10_Classes_Objects/my_prog.py
|
edzya/Python_RTU_08_20
|
d2921d998c611c18328dd523daf976a27ce858c1
|
[
"MIT"
] | 8
|
2020-08-31T16:10:54.000Z
|
2021-11-24T06:37:37.000Z
|
Diena_10_Classes_Objects/my_prog.py
|
edzya/Python_RTU_08_20
|
d2921d998c611c18328dd523daf976a27ce858c1
|
[
"MIT"
] | 8
|
2021-06-08T22:30:29.000Z
|
2022-03-12T00:48:55.000Z
|
Diena_10_Classes_Objects/my_prog.py
|
edzya/Python_RTU_08_20
|
d2921d998c611c18328dd523daf976a27ce858c1
|
[
"MIT"
] | 12
|
2020-09-28T17:06:52.000Z
|
2022-02-17T12:12:46.000Z
|
import sys
import my_util
print(my_util.add(5,7))
print(my_util.my_pi)
print(sys.path)
| 9.1
| 23
| 0.747253
|
001e9a7b9b61b7f78458a5d38d6ca20128e889b6
| 1,338
|
py
|
Python
|
dyn2sel/tests/apply_dcs/test_desdd.py
|
luccaportes/Scikit-DYN2SEL
|
3e102f4fff5696277c57997fb811139c5e6f8b4d
|
[
"MIT"
] | 1
|
2021-08-21T21:21:29.000Z
|
2021-08-21T21:21:29.000Z
|
dyn2sel/tests/apply_dcs/test_desdd.py
|
luccaportes/Scikit-DYN2SEL
|
3e102f4fff5696277c57997fb811139c5e6f8b4d
|
[
"MIT"
] | 10
|
2020-10-27T13:37:36.000Z
|
2021-09-11T02:40:51.000Z
|
dyn2sel/tests/apply_dcs/test_desdd.py
|
luccaportes/Scikit-DYN2SEL
|
3e102f4fff5696277c57997fb811139c5e6f8b4d
|
[
"MIT"
] | 1
|
2021-11-24T07:20:42.000Z
|
2021-11-24T07:20:42.000Z
|
import sys
sys.path.append("..")
from skmultiflow.data import SEAGenerator
# from skmultiflow.bayes import NaiveBayes
from skmultiflow.meta import AdaptiveRandomForest, OzaBagging
from dyn2sel.apply_dcs import DESDDMethod
def test_accuracy():
# an ensemble of Adaptive Random Forests should reach at least 80% accuracy with 200 instances of SEAGenerator
n_samples_train = 200
n_samples_test = 200
gen = SEAGenerator(noise_percentage=0.0)
# gen.prepare_for_use()
arf = AdaptiveRandomForest()
desdd = DESDDMethod(arf)
X_train, y_train = gen.next_sample(n_samples_train)
X_test, y_test = gen.next_sample(n_samples_test)
desdd.partial_fit(X_train, y_train)
assert desdd.score(X_test, y_test) > 0.80
# def test_drift():
# oza = OzaBagging(NaiveBayes())
# desdd = DESDDMethod(oza, max_lambda=10)
# gen = SEAGenerator(classification_function=0)
# gen.prepare_for_use()
# X_pre_drift, y_pre_drift = gen.next_sample(200)
# gen = SEAGenerator(classification_function=3)
# gen.prepare_for_use()
# X_post_drift, y_post_drift = gen.next_sample(200)
#
# desdd.partial_fit(X_pre_drift, y_pre_drift, classes=[0, 1])
# old_lambdas = desdd.ensemble.lambdas
#
# desdd.partial_fit(X_post_drift, y_post_drift)
# new_lambdas = desdd.ensemble.lambdas
# o=9
| 32.634146
| 116
| 0.731689
|
6ba2771460dd387d3334d060cbe403c92ef7615c
| 3,303
|
py
|
Python
|
brevis/predict/paper.py
|
vinayak19th/Brevis-2.0
|
a0dd211f25e2e719ae9a14f29ccd5cd6e72704aa
|
[
"MIT"
] | null | null | null |
brevis/predict/paper.py
|
vinayak19th/Brevis-2.0
|
a0dd211f25e2e719ae9a14f29ccd5cd6e72704aa
|
[
"MIT"
] | 10
|
2021-06-10T20:41:38.000Z
|
2022-03-12T00:56:18.000Z
|
brevis/predict/paper.py
|
vinayak19th/Brevis-2.0
|
a0dd211f25e2e719ae9a14f29ccd5cd6e72704aa
|
[
"MIT"
] | null | null | null |
import random
import os
import sys
import newspaper
from tools.trie import *
import requests
from bs4 import BeautifulSoup
from newsapi import NewsApiClient
from pathlib import Path  # Path is used for BASE_DIR below but was missing from the imports
# NUM_POPULAR_URLS = 3
# NUM_EACH_POPULAR = 2
# FORGET_ARTICLE = False
NUM_ARTICLES = 20
BASE_DIR = Path(__file__).resolve().parent.parent
SAVED_TRIE_DIR = BASE_DIR / 'tools' / 'savedtries'
def get_articles():
newsapi = NewsApiClient(api_key='d84cf1257d084ed3b9eec34250c389ca')
all_articles_response = newsapi.get_everything(sources='bbc-news,the-verge',
domains='bbc.co.uk,techcrunch.com',
language='en',
sort_by='relevancy')
title_content_dict = {}
articles = all_articles_response['articles']
for i in range(NUM_ARTICLES):
article = articles[i]
title = article['title']
if title in title_content_dict:
continue
html = requests.get(article['url'])
soup = BeautifulSoup(html.text, 'html.parser')
content = [p_tag.get_text() for p_tag in soup.find_all('p')]
content = '\n'.join(content)
title_content_dict[title] = {'content':content,'url':article['url']}
return title_content_dict
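# Illustrative consumption of get_articles() (the dict shape is taken from the
# code above; nothing here is required by the rest of the module):
#   articles = get_articles()
#   for title, info in articles.items():
#       print(title, info['url'], len(info['content']))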
# def forget_articles(url):
# print(f"Forgetting {url} articles")
# domain = \
# url.replace("https://", "http://").replace("http://", "").split("/")[0]
# d_pth = os.path.join(newspaper.settings.MEMO_DIR, domain + ".txt")
# if os.path.exists(d_pth):
# os.remove(d_pth)
# def get_articles():
# word_checker = Trie()
# print(SAVED_TRIE_DIR / 'trie.pkl')
# print("Loading word checker...")
# word_checker.load(SAVED_TRIE_DIR / 'trie.pkl')
# print(f"Obtaining {NUM_POPULAR_URLS} popular URLs")
# populars = newspaper.popular_urls()[:NUM_POPULAR_URLS]
# for p in populars:
# if FORGET_ARTICLE:
# forget_articles(p)
# print(p)
# print("Building popular newspapers...")
# popular_newspaper_build = []
# for idx, p in enumerate(populars):
# print(f"Building {idx + 1} \t {p}")
# popular_newspaper_build.append(newspaper.build(p, memoize_articles = False, language='en'))
# print("Getting articles text list...")
# articles_text_list = []
# title_article_dict = {}
# for pb in popular_newspaper_build:
# size = len(pb.articles)
# print(f"{pb.brand} has {size} articles.")
# for _ in range(NUM_EACH_POPULAR):
# while True:
# index = random.randint(0, size-1)
# print(index, end = ' ')
# article = pb.articles[index]
# try:
# article.download()
# article.parse()
# title = article.title
# text = article.text
# if not text:
# raise Exception('')
# except:
# continue
# print(text, word_checker.test_corpus__(text))
# title_article_dict[title] = text
# # articles_text_list.append(text)
# break
# print()
# return title_article_dict
| 29.756757
| 101
| 0.565849
|
07bc53b4ea55c1a5648d5d5745eb90c1e9ff81bb
| 3,348
|
py
|
Python
|
mlrtvc/src/training/vocoder_train.py
|
rushic24/Multi-Language-RTVC
|
f61f79ea119d10c876bd69b825f5cb84c9b66ac8
|
[
"MIT"
] | 9
|
2021-12-19T07:41:49.000Z
|
2022-01-28T17:16:32.000Z
|
mlrtvc/src/training/vocoder_train.py
|
rushic24/Multi-Language-RTVC
|
f61f79ea119d10c876bd69b825f5cb84c9b66ac8
|
[
"MIT"
] | 13
|
2021-12-17T18:57:39.000Z
|
2022-03-04T12:22:46.000Z
|
mlrtvc/src/training/vocoder_train.py
|
rushic24/Multi-Language-RTVC
|
f61f79ea119d10c876bd69b825f5cb84c9b66ac8
|
[
"MIT"
] | 3
|
2021-12-21T23:52:55.000Z
|
2022-01-06T23:08:34.000Z
|
import sys
sys.path.append("../")
from core.utils.argutils import print_args
from core.vocoder.train import train
from pathlib import Path
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Trains the vocoder from the synthesizer audios and the GTA synthesized mels, "
"or ground truth mels.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"run_id",
type=str,
help="Name for this model instance. If a model state from the same run ID was previously "
"saved, the training will restart from there. Pass -f to overwrite saved states and "
"restart from scratch.",
)
parser.add_argument(
"datasets_root",
type=str,
help="Path to the directory containing your SV2TTS directory. Specifying --syn_dir or --voc_dir "
"will take priority over this argument.",
)
parser.add_argument(
"--syn_dir",
type=str,
default=argparse.SUPPRESS,
help="Path to the synthesizer directory that contains the ground truth mel spectrograms, "
"the wavs and the embeds. Defaults to <datasets_root>/SV2TTS/synthesizer/.",
)
parser.add_argument(
"--voc_dir",
type=str,
default=argparse.SUPPRESS,
help="Path to the vocoder directory that contains the GTA synthesized mel spectrograms. "
"Defaults to <datasets_root>/SV2TTS/vocoder/. Unused if --ground_truth is passed.",
)
parser.add_argument(
"-m",
"--models_dir",
type=str,
default="../../saved_models/",
help="Path to the directory that will contain the saved model weights, as well as backups "
"of those weights and wavs generated during training.",
)
parser.add_argument(
"-l",
"--language_code",
type=str,
default="en_US",
help="Language code for the model.",
)
parser.add_argument(
"-g",
"--ground_truth",
action="store_true",
help="Train on ground truth spectrograms (<datasets_root>/SV2TTS/synthesizer/mels).",
)
parser.add_argument(
"-s",
"--save_every",
type=int,
default=1000,
help="Number of steps between updates of the model on the disk. Set to 0 to never save the "
"model.",
)
parser.add_argument(
"-b",
"--backup_every",
type=int,
default=25000,
help="Number of steps between backups of the model. Set to 0 to never make backups of the "
"model.",
)
parser.add_argument(
"-f",
"--force_restart",
action="store_true",
help="Do not load any saved model and restart from scratch.",
)
args = parser.parse_args()
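# Example invocation (run id and dataset path are placeholders, not project defaults):
#   python vocoder_train.py my_run ~/datasets --ground_truth -s 500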
# Process the arguments
if not hasattr(args, "syn_dir"):
args.syn_dir = Path(args.datasets_root, "SV2TTS", "synthesizer")
args.syn_dir = Path(args.syn_dir)
if not hasattr(args, "voc_dir"):
args.voc_dir = Path(args.datasets_root, "SV2TTS", "vocoder")
args.voc_dir = Path(args.voc_dir)
del args.datasets_root
args.models_dir = Path(args.models_dir)
args.models_dir.mkdir(exist_ok=True)
# Run the training
print_args(args, parser)
train(**vars(args))
| 32.504854
| 105
| 0.625448
|
39ad81147ba2cad90b254f92a2ba85c4f9a8424c
| 8,662
|
py
|
Python
|
venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
|
professorbee/randomplushmiku
|
b2db186a5d081da0cb00b8c73dee9eff6047b1f1
|
[
"MIT"
] | 4
|
2021-03-29T19:15:29.000Z
|
2021-06-08T05:34:00.000Z
|
venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
|
professorbee/randomplushmiku
|
b2db186a5d081da0cb00b8c73dee9eff6047b1f1
|
[
"MIT"
] | 14
|
2021-03-26T20:54:22.000Z
|
2021-04-06T17:18:53.000Z
|
venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
|
professorbee/randomplushmiku
|
b2db186a5d081da0cb00b8c73dee9eff6047b1f1
|
[
"MIT"
] | 2
|
2021-04-01T23:43:20.000Z
|
2021-04-27T13:35:28.000Z
|
# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2017-2020 hippo91 <guillaume.peillex@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for numpy ndarray class."""
import functools
import astroid
def infer_numpy_ndarray(node, context=None):
ndarray = """
class ndarray(object):
def __init__(self, shape, dtype=float, buffer=None, offset=0,
strides=None, order=None):
self.T = numpy.ndarray([0, 0])
self.base = None
self.ctypes = None
self.data = None
self.dtype = None
self.flags = None
# Should be a numpy.flatiter instance but not available for now
# Putting an array instead so that iteration and indexing are authorized
self.flat = np.ndarray([0, 0])
self.imag = np.ndarray([0, 0])
self.itemsize = None
self.nbytes = None
self.ndim = None
self.real = np.ndarray([0, 0])
self.shape = numpy.ndarray([0, 0])
self.size = None
self.strides = None
def __abs__(self): return numpy.ndarray([0, 0])
def __add__(self, value): return numpy.ndarray([0, 0])
def __and__(self, value): return numpy.ndarray([0, 0])
def __array__(self, dtype=None): return numpy.ndarray([0, 0])
def __array_wrap__(self, obj): return numpy.ndarray([0, 0])
def __contains__(self, key): return True
def __copy__(self): return numpy.ndarray([0, 0])
def __deepcopy__(self, memo): return numpy.ndarray([0, 0])
def __divmod__(self, value): return (numpy.ndarray([0, 0]), numpy.ndarray([0, 0]))
def __eq__(self, value): return numpy.ndarray([0, 0])
def __float__(self): return 0.
def __floordiv__(self): return numpy.ndarray([0, 0])
def __ge__(self, value): return numpy.ndarray([0, 0])
def __getitem__(self, key): return uninferable
def __gt__(self, value): return numpy.ndarray([0, 0])
def __iadd__(self, value): return numpy.ndarray([0, 0])
def __iand__(self, value): return numpy.ndarray([0, 0])
def __ifloordiv__(self, value): return numpy.ndarray([0, 0])
def __ilshift__(self, value): return numpy.ndarray([0, 0])
def __imod__(self, value): return numpy.ndarray([0, 0])
def __imul__(self, value): return numpy.ndarray([0, 0])
def __int__(self): return 0
def __invert__(self): return numpy.ndarray([0, 0])
def __ior__(self, value): return numpy.ndarray([0, 0])
def __ipow__(self, value): return numpy.ndarray([0, 0])
def __irshift__(self, value): return numpy.ndarray([0, 0])
def __isub__(self, value): return numpy.ndarray([0, 0])
def __itruediv__(self, value): return numpy.ndarray([0, 0])
def __ixor__(self, value): return numpy.ndarray([0, 0])
def __le__(self, value): return numpy.ndarray([0, 0])
def __len__(self): return 1
def __lshift__(self, value): return numpy.ndarray([0, 0])
def __lt__(self, value): return numpy.ndarray([0, 0])
def __matmul__(self, value): return numpy.ndarray([0, 0])
def __mod__(self, value): return numpy.ndarray([0, 0])
def __mul__(self, value): return numpy.ndarray([0, 0])
def __ne__(self, value): return numpy.ndarray([0, 0])
def __neg__(self): return numpy.ndarray([0, 0])
def __or__(self, value): return numpy.ndarray([0, 0])
def __pos__(self): return numpy.ndarray([0, 0])
def __pow__(self): return numpy.ndarray([0, 0])
def __repr__(self): return str()
def __rshift__(self): return numpy.ndarray([0, 0])
def __setitem__(self, key, value): return uninferable
def __str__(self): return str()
def __sub__(self, value): return numpy.ndarray([0, 0])
def __truediv__(self, value): return numpy.ndarray([0, 0])
def __xor__(self, value): return numpy.ndarray([0, 0])
def all(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
def any(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
def argmax(self, axis=None, out=None): return np.ndarray([0, 0])
def argmin(self, axis=None, out=None): return np.ndarray([0, 0])
def argpartition(self, kth, axis=-1, kind='introselect', order=None): return np.ndarray([0, 0])
def argsort(self, axis=-1, kind='quicksort', order=None): return np.ndarray([0, 0])
def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
def byteswap(self, inplace=False): return np.ndarray([0, 0])
def choose(self, choices, out=None, mode='raise'): return np.ndarray([0, 0])
def clip(self, min=None, max=None, out=None): return np.ndarray([0, 0])
def compress(self, condition, axis=None, out=None): return np.ndarray([0, 0])
def conj(self): return np.ndarray([0, 0])
def conjugate(self): return np.ndarray([0, 0])
def copy(self, order='C'): return np.ndarray([0, 0])
def cumprod(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
def cumsum(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
def diagonal(self, offset=0, axis1=0, axis2=1): return np.ndarray([0, 0])
def dot(self, b, out=None): return np.ndarray([0, 0])
def dump(self, file): return None
def dumps(self): return str()
def fill(self, value): return None
def flatten(self, order='C'): return np.ndarray([0, 0])
def getfield(self, dtype, offset=0): return np.ndarray([0, 0])
def item(self, *args): return uninferable
def itemset(self, *args): return None
def max(self, axis=None, out=None): return np.ndarray([0, 0])
def mean(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
def min(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
def newbyteorder(self, new_order='S'): return np.ndarray([0, 0])
def nonzero(self): return (1,)
def partition(self, kth, axis=-1, kind='introselect', order=None): return None
def prod(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
def ptp(self, axis=None, out=None): return np.ndarray([0, 0])
def put(self, indices, values, mode='raise'): return None
def ravel(self, order='C'): return np.ndarray([0, 0])
def repeat(self, repeats, axis=None): return np.ndarray([0, 0])
def reshape(self, shape, order='C'): return np.ndarray([0, 0])
def resize(self, new_shape, refcheck=True): return None
def round(self, decimals=0, out=None): return np.ndarray([0, 0])
def searchsorted(self, v, side='left', sorter=None): return np.ndarray([0, 0])
def setfield(self, val, dtype, offset=0): return None
def setflags(self, write=None, align=None, uic=None): return None
def sort(self, axis=-1, kind='quicksort', order=None): return None
def squeeze(self, axis=None): return np.ndarray([0, 0])
def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
def sum(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
def swapaxes(self, axis1, axis2): return np.ndarray([0, 0])
def take(self, indices, axis=None, out=None, mode='raise'): return np.ndarray([0, 0])
def tobytes(self, order='C'): return b''
def tofile(self, fid, sep="", format="%s"): return None
def tolist(self, ): return []
def tostring(self, order='C'): return b''
def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None): return np.ndarray([0, 0])
def transpose(self, *axes): return np.ndarray([0, 0])
def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
def view(self, dtype=None, type=None): return np.ndarray([0, 0])
"""
node = astroid.extract_node(ndarray)
return node.infer(context=context)
def _looks_like_numpy_ndarray(node):
return isinstance(node, astroid.Attribute) and node.attrname == "ndarray"
astroid.MANAGER.register_transform(
astroid.Attribute,
astroid.inference_tip(infer_numpy_ndarray),
_looks_like_numpy_ndarray,
)
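# Illustrative effect of the registration above (a quick check; the snippet assumes
# an environment where this brain plugin is loaded):
#   node = astroid.extract_node("import numpy; numpy.ndarray")
#   inferred = next(node.infer())  # resolves to the ndarray stub built in this module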
| 55.525641
| 110
| 0.625606
|
b87f92aac1f81b96b80d7c35bffc8400b1c7fbaf
| 1,933
|
py
|
Python
|
examples/transmitter/swagger_server/business_logic/generate_event.py
|
duo-labs/sharedsignals
|
15345e78c39a86fc6bafab642aca58a12cf57ef2
|
[
"BSD-3-Clause"
] | 12
|
2021-12-01T00:16:25.000Z
|
2022-02-09T06:49:23.000Z
|
examples/transmitter/swagger_server/business_logic/generate_event.py
|
duo-labs/sharedsignals
|
15345e78c39a86fc6bafab642aca58a12cf57ef2
|
[
"BSD-3-Clause"
] | 7
|
2021-11-30T19:04:40.000Z
|
2022-02-04T17:16:39.000Z
|
examples/transmitter/swagger_server/business_logic/generate_event.py
|
duo-labs/sharedsignals
|
15345e78c39a86fc6bafab642aca58a12cf57ef2
|
[
"BSD-3-Clause"
] | 3
|
2022-02-13T16:23:07.000Z
|
2022-03-15T22:31:06.000Z
|
# Copyright (c) 2021 Cisco Systems, Inc. and its affiliates
# All rights reserved.
# Use of this source code is governed by a BSD 3-Clause License
# that can be found in the LICENSE file.
from swagger_server.events import (
SecurityEvent,
SessionRevoked, TokenClaimsChange, CredentialChange,
AssuranceLevelChange, DeviceComplianceChange,
AccountDisabled, AccountEnabled, AccountPurged, IdentifierChanged,
IdentifierRecycled, CredentialCompromise, OptIn, OptOutCancelled,
OptOutEffective, OptOutInitiated, RecoveryActivated,
RecoveryInformationChanged
)
from swagger_server.models import (
Subject, EventType
)
event_type_map = {
EventType.session_revoked: SessionRevoked,
EventType.token_claims_change: TokenClaimsChange,
EventType.credential_change: CredentialChange,
EventType.assurance_level_change: AssuranceLevelChange,
EventType.device_compliance_change: DeviceComplianceChange,
EventType.account_purged: AccountPurged,
EventType.account_disabled: AccountDisabled,
EventType.account_enabled: AccountEnabled,
EventType.identifier_changed: IdentifierChanged,
EventType.identifier_recycled: IdentifierRecycled,
EventType.credential_compromise: CredentialCompromise,
EventType.opt_in: OptIn,
EventType.opt_out_initiated: OptOutInitiated,
EventType.opt_out_cancelled: OptOutCancelled,
EventType.opt_out_effective: OptOutEffective,
EventType.recovery_activated: RecoveryActivated,
EventType.recovery_information_changed: RecoveryInformationChanged,
}
def generate_security_event(event_type: EventType,
subject: Subject) -> SecurityEvent:
event_class = event_type_map[event_type]
event_attribute_name = event_type.name
security_event = {
"events": {
event_attribute_name: event_class(subject=subject),
}
}
return SecurityEvent.parse_obj(security_event)
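# Illustrative call (schematic only; how a Subject instance is built depends on the
# swagger models, which are not shown in this file):
#   event = generate_security_event(EventType.session_revoked, subject)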
| 37.173077
| 71
| 0.7791
|
609b2164235de06486fb445df9e3f6b85704bb9f
| 2,272
|
py
|
Python
|
venv/lib/python2.7/site-packages/openpyxl/descriptors/sequence.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | 4
|
2018-07-04T17:20:12.000Z
|
2019-07-14T18:07:25.000Z
|
venv/lib/python2.7/site-packages/openpyxl/descriptors/sequence.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
venv/lib/python2.7/site-packages/openpyxl/descriptors/sequence.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | 1
|
2018-09-03T03:02:06.000Z
|
2018-09-03T03:02:06.000Z
|
from __future__ import absolute_import
# copyright openpyxl 2010-2015
from openpyxl.compat import safe_string
from openpyxl.xml.functions import Element
from openpyxl.utils.indexed_list import IndexedList
from .base import Descriptor, _convert
from .namespace import namespaced
class Sequence(Descriptor):
"""
A sequence (list or tuple) that may only contain objects of the declared
type
"""
expected_type = type(None)
seq_types = (list, tuple)
idx_base = 0
unique = False
def __set__(self, instance, seq):
if not isinstance(seq, self.seq_types):
raise TypeError("Value must be a sequence")
seq = [_convert(self.expected_type, value) for value in seq]
if self.unique:
seq = IndexedList(seq)
super(Sequence, self).__set__(instance, seq)
def to_tree(self, tagname, obj, namespace=None):
"""
Convert the sequence represented by the descriptor to an XML element
"""
tagname = namespaced(obj, tagname, namespace)
for idx, v in enumerate(obj, self.idx_base):
if hasattr(v, "to_tree"):
el = v.to_tree(tagname, idx)
else:
el = Element(tagname)
el.text = safe_string(v)
yield el
class ValueSequence(Sequence):
"""
A sequence of primitive types that are stored as a single attribute.
"val" is the default attribute
"""
attribute = "val"
def to_tree(self, tagname, obj, namespace=None):
tagname = namespaced(self, tagname, namespace)
for v in obj:
yield Element(tagname, {self.attribute:safe_string(v)})
def from_tree(self, node):
return node.get(self.attribute)
class NestedSequence(Sequence):
"""
Wrap a sequence in an containing object
"""
count = True
def to_tree(self, tagname, obj, namespace=None):
tagname = namespaced(self, tagname, namespace)
container = Element(tagname)
if self.count:
container.set('count', str(len(obj)))
for v in obj:
container.append(v.to_tree())
return container
def from_tree(self, node):
return [self.expected_type.from_tree(el) for el in node]
| 26.114943
| 76
| 0.631602
|
bda3e468bc071bc89002eac8a5399b49fcf5b76c
| 18,362
|
py
|
Python
|
Telstra_EventDetection/models/resister_phone_number_list.py
|
telstra/EventDetectionAPI-SDK-python
|
94f3bb56ebd3e7bcb8818af3b1b43d910f92bd1f
|
[
"Apache-2.0"
] | 3
|
2018-05-23T11:12:25.000Z
|
2020-05-18T00:53:50.000Z
|
Telstra_EventDetection/models/resister_phone_number_list.py
|
telstra/EventDetectionAPI-SDK-python
|
94f3bb56ebd3e7bcb8818af3b1b43d910f92bd1f
|
[
"Apache-2.0"
] | null | null | null |
Telstra_EventDetection/models/resister_phone_number_list.py
|
telstra/EventDetectionAPI-SDK-python
|
94f3bb56ebd3e7bcb8818af3b1b43d910f92bd1f
|
[
"Apache-2.0"
] | 1
|
2018-12-10T01:35:11.000Z
|
2018-12-10T01:35:11.000Z
|
# coding: utf-8
"""
Telstra Event Detection API
# Introduction Telstra's Event Detection API provides the ability to subscribe to and receive mobile network events for registered mobile numbers associated with Telstra's mobile network, such as; SIM swap, port-in, port-out, new MSIDN, new mobile service and cancelled mobile service, as well as carrier-detection. ## Features Event Detection API provides these features | Feature | Description | |---|---| |`SIM swap` | Returns timestamped event data when any of the following network events occurs in connection with a registered mobile number associated with Telstra’s mobile network: SIM swap, port-in, port-out, new MSISDN, new mobile service or cancelled mobile service | |`Carrier Detection` | Find out what Australian carrier a mobile number is subscribed to | |`International Roaming` | *Coming soon.* Will indicate if a mobile number is operaing in Australia or outside of Australia. | ## Getting access to the API The Event Detection API is available on our Enterprise Plans only. Please submit your [sales enquiry](https://dev.telstra.com/content/sales-enquiry-contact-form) . Or contact your Telstra Account Executive. We're available Monday to Friday 9am - 5pm. ## Frequently asked questions **Q: What is the Telstra Event Detection (TED) API?** A: The Telstra Event Detection (TED) API is a subscription based service from Telstra that enables a customer to be alerted when a particular network event is detected in connection with a registered mobile number that may indicate that a fraudulent misuse of an end user’s mobility service is about to occur. **Q: What are the network events that the TED API can detect?** A: Currently the TED API is able to detect a bundle of events associated with Telstra SIM swaps. **Q: Can TED API detect number porting between operators other than Telstra? E.g. Optus to Vodafone?** A: No, we don’t report these type of events at present. **Q: How quickly are the network events detected?** A: This will vary depending on the event being detected, but generally we detect the event within a couple of seconds of it occurring and notify subscribers within near real time via the API. **Q: How long does Telstra store the event data for?** A: Event data is stored for 90 days from the occurrence of a network event and then securely purged. **Q: Is there a limit to the number of registered mobile numbers I can have for the Telstra Event Detection API?** A: No. You may have as many Telstra Event Detection API registered mobile numbers as you require within practical limits. **Q: Why is monitoring for SIM SWAP events important?** A: Criminals are becoming much more savvy and will often try to circumvent two factor authentication protocols by swapping the SIM card for a particular mobile number in order to gain fraudulent access to the end user’s service. Monitoring for SIM swap events may provide early detection that this is occurring and help prevent criminals from being successful in their endeavours. **Q: If an end user is currently a customer of a Telstra Reseller that still utilises the Telstra Network, am I able to detect their Network events?** A: No. Telstra resellers such as Aldi Mobile are Mobile Virtual Network Operators (MVNO) that operate as totally independent businesses to Telstra. The Telstra SIM swap API does not monitor MNVO network events at present. **Q: How do I purchase Telstra Event Detection API?** A: At the moment, the Telstra Event Detection API is only available through your Telstra Account Manager. 
If you don't have a Telstra Account Manager, or are not sure who they are, please submit a [sales enquiry](https://dev.telstra.com/content/sales-enquiry-contact-form). **Q: What support options are available for the Telstra Event Detection API?** A: We provide 24/7 telephone based technical support (for paid plans) along with email support and an online community forum. **Q: Do you detect network events from another carrier?** A: The Telstra Event Detection API detects network events associated with the Telstra network and Telstra mobile services. **Q: Which Telstra personnel have access to the event detection data?** A: Access to Telstra Event Detection data is restricted to only Telstra personnel that require access for the purposes of providing the service. **Q: Why should I purchase the Telstra Event Detection API from Telstra?** A: As the network events are occurring on the Telstra network, Telstra is in a position to be able to provide fast notification of an event as it is occurring, helping subscribers to prevent fraudulent activity from occurring and to minimise the resulting financial losses. **Q: If I require assistance setting up my Telstra Event Detection API, are there any Professional Services options available to me?** A: At the current time, the Telstra Event Detection API does not have any Professional Service options available. **Q: What subscription options are available for Telstra Event Detection API?** A: There is a month-by-month Pay As You Go (PAYG) plan or 12 Month contract option available. **Q: Do Early Termination Charges (ETC’s) apply?** A: If you have subscribed to a 12 month contract and want to terminate the plan or downgrade to a lower plan before the expiry of your existing 12 month term, we may charge you ETCs. **Q: What privacy requirements apply to my use of the Telstra Event Detection API?** A: Before registering an end user’s mobile number with Telstra Event Detection API, you must: 1. prepare an “End User Notification” for our approval, which sets out what end user information will be disclosed via the API, the purposes for which that information will be disclosed, and to which third parties that information will be disclosed; 2. provide each of your end user with the End User Notification; and 3. obtain express, informed consent from each end user to the use and disclosure of their event data via the API for the purposes set out in the notification. **Q: What terms and conditions apply to my use of the Telstra Event Detection API?** A: Before using the Telstra Event Detection API, you must agree to the TED API [“Our Customer Terms”](https://www.telstra.com.au/customer-terms/business-government#cloud-services). # Getting Started First step is to create an `App`. After you've created an `App`, follow these steps 1. Authenticate by getting an Oauth token 2. 
Use the Event Detection API ## Run in Postman To get started quickly and easily with all the features of the Event Detection API, download the Postman collection here <a href=\"https://app.getpostman.com/run-collection/8ab2273e066e5c6fd653#?env%5BEvent%20Detection%20API%5D=W3sidHlwZSI6InRleHQiLCJlbmFibGVkIjp0cnVlLCJrZXkiOiJjbGllbnRfaWQiLCJ2YWx1ZSI6ImNsaWVudF9pZCJ9LHsidHlwZSI6InRleHQiLCJlbmFibGVkIjp0cnVlLCJrZXkiOiJjbGllbnRfc2VjcmV0IiwidmFsdWUiOiJjbGllbnRfc2VjcmV0In0seyJ0eXBlIjoidGV4dCIsImVuYWJsZWQiOnRydWUsImtleSI6ImFjY2Vzc190b2tlbiIsInZhbHVlIjoiaTZPdmtyelVuc3hvODhrcU9BMXg4RWtPVWxuSyJ9LHsidHlwZSI6InRleHQiLCJlbmFibGVkIjp0cnVlLCJrZXkiOiJob3N0IiwidmFsdWUiOiJ0YXBpLnRlbHN0cmEuY29tIn0seyJ0eXBlIjoidGV4dCIsImVuYWJsZWQiOnRydWUsImtleSI6Im9hdXRoLWhvc3QiLCJ2YWx1ZSI6InRhcGkudGVsc3RyYS5jb20ifV0=\"><img alt=\"Run in Postman\" src=\"https://run.pstmn.io/button.svg\" /></a> ## Authentication To get an OAuth 2.0 Authentication token, pass through your Consumer Key and Consumer Secret that you received when you registered for the Event Detection API key. The `grant_type` should be left as `client_credentials` and the scope as v1_eventdetection_simswap. The token will expire in one hour. Get your keys by creating an `App`. # Request ` CONSUMER_KEY=\"your consumer key\" CONSUMER_SECRET=\"your consumer secret\" curl -X POST -H 'Content-Type: application/x-www-form-urlencoded' \\ -d 'grant_type=client_credentials&client_id=$CONSUMER_KEY&client_secret=$CONSUMER_SECRET&scope=v1_eventdetection_simswap' \\ 'https://tapi.telstra.com/v2/oauth/token' ` # Response `{ \"access_token\" : \"1234567890123456788901234567\", \"token_type\" : \"Bearer\", \"expires_in\" : \"3599\" }` ## Subscribe mobile numbers Subscribing end user mobile numbers informs the API to register that mobile number so that you can poll those numbers for particular events. You can subscribe and unsubscribe numbers (opt in and opt out) against this service. Only numbers that are opted in (i.e. subscribed) can be polled for events. You must have obtained your end customer’s consent before you can opt them into the Event Detection service. 
# Request `curl -X POST -H 'content-type: application/json' \\ -H 'Authorization: Bearer $TOKEN' \\ -d '{ \"msisdns\": [ \"61467754783\" ], \"eventType\": \"simswap\", \"notificationUrl\": \"https://requestb.in/161r14g1\" }' \\ 'https://tapi.telstra.com/v1/eventdetection/events'` | Parameter | Description | |---|---| |`msisdns` | list of mobile numbers that has to be registered for the event | |`eventType` | event Type to be subscribed to | |`notificationUrl` | URL where the event notifications has to be posted (Optional) | # Response `{ \"msisdns\": [ { \"msisdn\": \"61467754783\", \"description\": \"opt-in status updated for this MSISDN\", \"carrierName\": \"Telstra\" } ] }` | Parameter | Description | |---|---| |`msisdn` | msisdn | |`description` | status description indicating if the msisdn was opted-in| |`carrierName` | carrier name for the msisdn | ## Unsubscribe mobile numbers Unsubscribe mobile numbers against a particular event # Request `curl -X DELETE -H 'content-type: application/json' \\ -H 'Authorization: Bearer $token' \\ -d '{\"msisdns\": [\"61467754783\"]}' \\ 'https://tapi.telstra.com/v1/eventdetection/events/{event-type}'` | Parameter | Description | |---|---| |`msisdns` | list of mobile numbers that has to be unsubscribed from the event | |`eventType` | event Type to be unsubscribed from | |`notificationUrl` | notification URL that has to be removed (Optional) | # Response ` { \"msisdns\": [ { \"msisdn\": \"61467754783\", \"description\": \"opt-out status updated for this MSISDN\", \"carrierName\": \"Telstra\" } ] } ` | Parameter | Description | |---|---| |`msisdn` | msisdn | |`description` | status description indicating if the msisdn was opted-out | |`carrierName` | carrier name for the msisdn | ## Get event subscriptions Get the list of events subscribed for # Request `curl -X POST -H 'content-type: application/json' \\ -H 'Authorization: Bearer $TOKEN' \\ -d '{ \"msisdns\": [ \"61467754783\" ] }' \\ 'https://tapi.telstra.com/v1/eventdetection/events/subscriptions'` | Parameter | Description | |---|---| |`msisdns` | list of msisdns to get the subscription details | # Response ` { \"notificationURL\": \"https://requestb.in/161r14g1\", \"subscriptions\": [ { \"msisdn\": \"61467754783\", \"events\": [ \"SIM_SWAP\" ], \"carrierName\": \"Telstra\" } ] } ` | Parameter | Description | |---|---| |`notificationURL` | notification URL configured while registering msisdns | |`msisdn` | msisdn | |`events` | list of subscribed events for that msisdn | |`carrierName` | carrier name for the msisdn | ## Poll events Poll events for a given set of msisdns # Request `curl -X POST -H 'content-type: application/json' \\ -H 'Authorization: Bearer $token' \\ -d '{ \"msisdns\": [ \"61467754783\", \"61467984007\" ] }' \\ 'https://tapi.telstra.com/v1/eventdetection/events/{event_type}'` Parameter | Description | |---|---| |`msisdns` | list of msisdns to be polled for events | |`eventType` | event Type to be polled for | # Response ` { \"eventType\": \"simswap\", \"msisdns\": [ { \"msisdn\": \"+61467754783\", \"mobileServiceEvents\": [ { \"eventId\": \"NEW_SIM\", \"eventDate\": \"2018-01-19T14:40:34\" } ] }, { \"msisdn\": \"+61467984007\", \"mobileServiceEvents\": [ { \"eventId\": \"PORTOUT_SVC\", \"eventDate\": \"2018-02-21T15:20:01\", \"carrierName\": \"Telstra\" } ] } ] } ` | Parameter | Description | |---|---| |`eventType` | event type requested | |`msisdn` | msisdn | |`mobileServiceEvents` | list of service events | |`eventId` | Id of the event occured. 
Event Id can be any one of the following - NEW_MSISDN, PORTIN_SVC, PORTOUT_SVC, NEW_SIM, CREATE_SVC, DELETE_SVC | |`eventDate` | timestamp indicating when the event occured | |`carrierName` | carrier name for the msisdn. Carrier name will be returned only for port out events | ## Push notifications Push event notifications to the URL are configured with the parameter `notificationUrl` while subscribing mobile numbers. # Event notification format ` { \"eventId\": \"NEW_SIM\", \"msisdn\" : \"61467754783\", \"eventDate\" : \"2018-01-19T14:40:34\" } ` | Parameter | Description | |---|---| |`eventId` | event Id indicating the event occured. Event Id can be any one of the following - NEW_MSISDN, PORTIN_SVC, PORTOUT_SVC, NEW_SIM, CREATE_SVC, DELETE_SVC | |`msisdn` | msisdn for which the event occured | |`eventDate` | timestamp indicating when the event occured | ## SIMswap sub-features The following is a list of the sub-features for SIM swap and the description for that sub-feature. These will appear in the 'eventId' parameter in the API response payload for SIMswap events. | SIM swap Sub-Feature | Description | |---|---| |`NEW_MSISDN` | The MSISDN of a service changes. The SIM card is not changed. Results in two events being created: 1) CREATE_SVC/PORT_IN_SVC for the new number, and 2) a NEW_MSISDN for the old MSISDN | |`PORTIN_SVC` | A MSISDN registered for event detection is created as a mobile service on the Telstra network (note: if the MSISDN was not already registered by at least one customer for at least one event type, this event would be interpreted as a CREATE_SVC) | |`PORTOUT_SVC` | The MSISDN is ported out from Telstra to another domestic operator | |`NEW_SIM` | An existing Telstra MSISDN is moved onto a new SIM | |`CREATE_SVC` | A new mobile service is created on the Telstra network (a new SIM and a new MSISDN) | |`DELETE_SVC` | A mobile service (MSISDN and SIM) on the Telstra network is cancelled outright (as opposed to ported out to another domestic network) | ## SDK repos * [Event Detection API - Java SDK](https://github.com/telstra/EventDetectionAPI-SDK-java) * [Event Detection API - .Net2 SDK](https://github.com/telstra/EventDetectionAPI-SDK-dotnet) * [Event Detection API - NodeJS SDK](https://github.com/telstra/EventDetectionAPI-SDK-node) * [Event Detection API - PHP SDK](https://github.com/telstra/EventDetectionAPI-SDK-php) * [Event Detection API - Python SDK](https://github.com/telstra/EventDetectionAPI-SDK-python) * [Event Detection API - Ruby SDK](https://github.com/telstra/EventDetectionAPI-SDK-ruby) # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from Telstra_EventDetection.models.test import Test # noqa: F401,E501
class ResisterPhoneNumberList(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'msisdns': 'list[Test]'
}
attribute_map = {
'msisdns': 'msisdns'
}
def __init__(self, msisdns=None): # noqa: E501
"""ResisterPhoneNumberList - a model defined in Swagger""" # noqa: E501
self._msisdns = None
self.discriminator = None
if msisdns is not None:
self.msisdns = msisdns
@property
def msisdns(self):
"""Gets the msisdns of this ResisterPhoneNumberList. # noqa: E501
MSISDNs array # noqa: E501
:return: The msisdns of this ResisterPhoneNumberList. # noqa: E501
:rtype: list[Test]
"""
return self._msisdns
@msisdns.setter
def msisdns(self, msisdns):
"""Sets the msisdns of this ResisterPhoneNumberList.
MSISDNs array # noqa: E501
:param msisdns: The msisdns of this ResisterPhoneNumberList. # noqa: E501
:type: list[Test]
"""
self._msisdns = msisdns
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResisterPhoneNumberList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 156.940171
| 15,265
| 0.696711
|
ba3c90e9a6c7b14a6e25f2e63043f261ad425a7c
| 4,347
|
py
|
Python
|
PRAI/praiapp/models.py
|
vishwabasak41/praicopy2
|
45fca251608d1621e75bfcc963bf5ff29d695336
|
[
"MIT"
] | null | null | null |
PRAI/praiapp/models.py
|
vishwabasak41/praicopy2
|
45fca251608d1621e75bfcc963bf5ff29d695336
|
[
"MIT"
] | null | null | null |
PRAI/praiapp/models.py
|
vishwabasak41/praicopy2
|
45fca251608d1621e75bfcc963bf5ff29d695336
|
[
"MIT"
] | null | null | null |
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from __future__ import unicode_literals
from django.db import models
class Signup(models.Model):
id = models.AutoField(db_column='Id', primary_key=True) # Field name made lowercase.
name = models.CharField(db_column='Name', max_length=30, blank=True, null=True) # Field name made lowercase.
email = models.CharField(db_column='Email', max_length=30, blank=True, null=True) # Field name made lowercase.
passwd = models.CharField(db_column='Passwd', max_length=40, blank=True, null=True) # Field name made lowercase.
class Meta:
managed = False
db_table = 'SIGNUP'
class AuthGroup(models.Model):
name = models.CharField(unique=True, max_length=80)
class Meta:
managed = False
db_table = 'auth_group'
class AuthGroupPermissions(models.Model):
group = models.ForeignKey(AuthGroup, models.DO_NOTHING)
permission = models.ForeignKey('AuthPermission', models.DO_NOTHING)
class Meta:
managed = False
db_table = 'auth_group_permissions'
unique_together = (('group', 'permission'),)
class AuthPermission(models.Model):
name = models.CharField(max_length=255)
content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING)
codename = models.CharField(max_length=100)
class Meta:
managed = False
db_table = 'auth_permission'
unique_together = (('content_type', 'codename'),)
class AuthUser(models.Model):
password = models.CharField(max_length=128)
last_login = models.DateTimeField(blank=True, null=True)
is_superuser = models.IntegerField()
username = models.CharField(unique=True, max_length=150)
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.CharField(max_length=254)
is_staff = models.IntegerField()
is_active = models.IntegerField()
date_joined = models.DateTimeField()
class Meta:
managed = False
db_table = 'auth_user'
class AuthUserGroups(models.Model):
user = models.ForeignKey(AuthUser, models.DO_NOTHING)
group = models.ForeignKey(AuthGroup, models.DO_NOTHING)
class Meta:
managed = False
db_table = 'auth_user_groups'
unique_together = (('user', 'group'),)
class AuthUserUserPermissions(models.Model):
user = models.ForeignKey(AuthUser, models.DO_NOTHING)
permission = models.ForeignKey(AuthPermission, models.DO_NOTHING)
class Meta:
managed = False
db_table = 'auth_user_user_permissions'
unique_together = (('user', 'permission'),)
class DjangoAdminLog(models.Model):
action_time = models.DateTimeField()
object_id = models.TextField(blank=True, null=True)
object_repr = models.CharField(max_length=200)
action_flag = models.SmallIntegerField()
change_message = models.TextField()
content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING, blank=True, null=True)
user = models.ForeignKey(AuthUser, models.DO_NOTHING)
class Meta:
managed = False
db_table = 'django_admin_log'
class DjangoContentType(models.Model):
app_label = models.CharField(max_length=100)
model = models.CharField(max_length=100)
class Meta:
managed = False
db_table = 'django_content_type'
unique_together = (('app_label', 'model'),)
class DjangoMigrations(models.Model):
app = models.CharField(max_length=255)
name = models.CharField(max_length=255)
applied = models.DateTimeField()
class Meta:
managed = False
db_table = 'django_migrations'
class DjangoSession(models.Model):
session_key = models.CharField(primary_key=True, max_length=40)
session_data = models.TextField()
expire_date = models.DateTimeField()
class Meta:
managed = False
db_table = 'django_session'
| 32.931818
| 117
| 0.705544
|
972f09c4319f9569415d79e6579cc75589308765
| 799
|
py
|
Python
|
tests/syntax/IndentationError.py
|
augustand/Nuitka
|
b7b9dd50b60505a309f430ce17cad36fb7d75048
|
[
"Apache-2.0"
] | null | null | null |
tests/syntax/IndentationError.py
|
augustand/Nuitka
|
b7b9dd50b60505a309f430ce17cad36fb7d75048
|
[
"Apache-2.0"
] | null | null | null |
tests/syntax/IndentationError.py
|
augustand/Nuitka
|
b7b9dd50b60505a309f430ce17cad36fb7d75048
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def someFunc():
a
b
| 36.318182
| 79
| 0.705882
|
297d8d192f8ca2bde9a082a382b86ecbdb57e636
| 475
|
py
|
Python
|
sachima/sachima_http_server.py
|
gitter-badger/Sachima
|
76547fb6a21f1fea597994e6ee02c5db080d1e7a
|
[
"MIT"
] | null | null | null |
sachima/sachima_http_server.py
|
gitter-badger/Sachima
|
76547fb6a21f1fea597994e6ee02c5db080d1e7a
|
[
"MIT"
] | null | null | null |
sachima/sachima_http_server.py
|
gitter-badger/Sachima
|
76547fb6a21f1fea597994e6ee02c5db080d1e7a
|
[
"MIT"
] | null | null | null |
from nameko.standalone.rpc import ClusterRpcProxy
from sanic import Sanic
from sanic.response import json
from sachima import conf
app = Sanic()
CONFIG = conf.get("BROKER")
def sachima_rpc_reports(req):
with ClusterRpcProxy(CONFIG) as rpc:
# print(req)
res = rpc.data.get_report(req.json)
return res
@app.route("/reports", methods=["POST"])
async def sachima(request):
# print(request)
return json(sachima_rpc_reports(request))
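# Illustrative request (assumes the app is served on Sanic's default port 8000; the
# JSON payload shape is whatever the `data.get_report` RPC service expects):
#   curl -X POST http://localhost:8000/reports -H 'content-type: application/json' -d '{}'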
avg_line_length: 20.652174 | max_line_length: 49 | alphanum_fraction: 0.703158

hexsha: 801b3abb1a4a71e319aa3974d314b61db36707c2 | size: 2,238 | ext: py | lang: Python
repo_path: c2cgeoportal/subscribers.py | repo_name: craxxkid/c2cgeoportal | repo_head_hexsha: 60ca7d5d014d69b0a938f858271c911a30da77c3 (same across stars/issues/forks columns)
licenses: ["BSD-2-Clause-FreeBSD"] | max_stars_count: null | max_issues_count: null | max_forks_count: null
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2016, Camptocamp SA
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
from pyramid.i18n import get_localizer, TranslationStringFactory
from pyramid.events import subscriber, BeforeRender, NewRequest
@subscriber(BeforeRender)
def add_renderer_globals(event):
request = event.get("request")
if request:
event["_"] = request.translate
event["localizer"] = request.localizer
tsf = TranslationStringFactory("c2cgeoportal")
@subscriber(NewRequest)
def add_localizer(event):
request = event.request
localizer = get_localizer(request)
def auto_translate(string):
return localizer.translate(tsf(string))
request.localizer = localizer
request.translate = auto_translate
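
For context, the @subscriber decorators above only take effect once Pyramid scans the module. A minimal sketch of how an application might wire that up (the factory name and the dotted module path are illustrative assumptions):

# Hypothetical application setup; any package containing the decorated
# functions can be passed to scan().
from pyramid.config import Configurator

def make_app(global_config, **settings):
    config = Configurator(settings=settings)
    config.scan("c2cgeoportal.subscribers")  # registers the BeforeRender/NewRequest handlers
    return config.make_wsgi_app()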
avg_line_length: 40.690909 | max_line_length: 81 | alphanum_fraction: 0.773458

hexsha: fb572150d5d9dea15c7224dc7178c709fea7aef4 | size: 348 | ext: py | lang: Python
repo_path: src/helper_addressbook.py | repo_name: kaue/PyBitmessage | repo_head_hexsha: 7b8bf082ff0d569f507d65e087000e4e3d6ccf3f (same across stars/issues/forks columns)
licenses: ["MIT", "BSD-2-Clause-FreeBSD"]
max_stars_count: 1,583 (2015-01-01T13:03:20.000Z to 2022-03-31T23:10:00.000Z)
max_issues_count: 841 (2015-01-01T14:51:48.000Z to 2022-03-25T06:45:14.000Z)
max_forks_count: 482 (2015-01-07T00:53:25.000Z to 2022-03-24T15:58:12.000Z)
"""
Insert value into addressbook
"""
from bmconfigparser import BMConfigParser
from helper_sql import sqlExecute
def insert(address, label):
"""perform insert into addressbook"""
if address not in BMConfigParser().addresses():
return sqlExecute('''INSERT INTO addressbook VALUES (?,?)''', label, address) == 1
return False
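
A hedged usage sketch (the address string is a placeholder and the import assumes the module is on the path): insert() returns True only when the address is not one of the node's own addresses and the SQL insert actually affects a row.

# Hypothetical call; the BM- address below is a made-up placeholder.
import helper_addressbook

if helper_addressbook.insert("BM-placeholderAddress", "Alice"):
    print("added to addressbook")
else:
    print("skipped (own address, or the insert failed)")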
avg_line_length: 23.2 | max_line_length: 90 | alphanum_fraction: 0.70977

hexsha: 5aec5d8e38aba39e6aba9a8f19637587c2f12544 | size: 9,128 | ext: py | lang: Python
repo_path: python/paddle/fluid/tests/unittests/test_elementwise_add_op.py | repo_name: ysh329/Paddle | repo_head_hexsha: 50ad9046c9a440564d104eaa354eb9df83a35678 (same across stars/issues/forks columns)
licenses: ["Apache-2.0"]
max_stars_count: 9 (2017-12-04T02:58:01.000Z to 2020-12-03T14:46:30.000Z)
max_issues_count: 7 (2017-12-05T20:29:08.000Z to 2018-10-15T08:57:40.000Z)
max_forks_count: 6 (2018-03-19T22:38:46.000Z to 2019-11-01T22:28:27.000Z)
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
class TestElementwiseAddOp(OpTest):
def init_kernel_type(self):
self.use_mkldnn = False
def setUp(self):
self.op_type = "elementwise_add"
self.dtype = np.float32
self.axis = -1
self.init_dtype()
self.init_input_output()
self.init_kernel_type()
self.init_axis()
self.inputs = {
'X': OpTest.np_dtype_to_fluid_dtype(self.x),
'Y': OpTest.np_dtype_to_fluid_dtype(self.y)
}
self.attrs = {'axis': self.axis, 'use_mkldnn': self.use_mkldnn}
self.outputs = {'Out': self.out}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
if self.dtype == np.float16:
return
self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.005)
def test_check_grad_ingore_x(self):
if self.dtype == np.float16:
return
self.check_grad(
['Y'], 'Out', max_relative_error=0.005, no_grad_set=set("X"))
def test_check_grad_ingore_y(self):
if self.dtype == np.float16:
return
self.check_grad(
['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
def init_input_output(self):
self.x = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
self.y = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
self.out = np.add(self.x, self.y)
def init_dtype(self):
pass
def init_axis(self):
pass
class TestFP16ElementwiseAddOp(TestElementwiseAddOp):
def init_dtype(self):
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
class TestElementwiseAddOp_scalar(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(1).astype(self.dtype)
self.out = self.x + self.y
class TestFP16ElementwiseAddOp_scalar(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(1).astype(self.dtype)
self.out = self.x + self.y
class TestElementwiseAddOp_scalar2(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(1, 1).astype(self.dtype)
self.out = self.x + self.y
class TestFP16ElementwiseAddOp_scalar2(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(1, 1).astype(self.dtype)
self.out = self.x + self.y
class TestElementwiseAddOp_Vector(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.random((32, )).astype(self.dtype)
self.y = np.random.random((32, )).astype(self.dtype)
self.out = np.add(self.x, self.y)
class TestFP16ElementwiseAddOp_Vector(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.random((32, )).astype(self.dtype)
self.y = np.random.random((32, )).astype(self.dtype)
self.out = np.add(self.x, self.y)
class TestElementwiseAddOp_broadcast_0(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(2).astype(self.dtype)
self.out = self.x + self.y.reshape(2, 1, 1)
def init_axis(self):
self.axis = 0
class TestFP16ElementwiseAddOp_broadcast_0(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(2).astype(self.dtype)
self.out = self.x + self.y.reshape(2, 1, 1)
def init_axis(self):
self.axis = 0
class TestElementwiseAddOp_broadcast_1(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(3).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 3, 1)
def init_axis(self):
self.axis = 1
class TestFP16ElementwiseAddOp_broadcast_1(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(3).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 3, 1)
def init_axis(self):
self.axis = 1
class TestElementwiseAddOp_broadcast_2(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(4).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 1, 4)
class TestFP16ElementwiseAddOp_broadcast_2(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(4).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 1, 4)
class TestElementwiseAddOp_broadcast_3(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4, 5).astype(self.dtype)
self.y = np.random.rand(3, 4).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 3, 4, 1)
def init_axis(self):
self.axis = 1
class TestFP16ElementwiseAddOp_broadcast_3(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4, 5).astype(self.dtype)
self.y = np.random.rand(3, 4).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 3, 4, 1)
def init_axis(self):
self.axis = 1
class TestElementwiseAddOp_broadcast_4(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4, 5).astype(self.dtype)
self.y = np.random.rand(2, 1).astype(self.dtype)
self.out = self.x + self.y.reshape(2, 1, 1, 1)
def init_axis(self):
self.axis = 0
class TestFP16ElementwiseAddOp_broadcast_4(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4, 5).astype(self.dtype)
self.y = np.random.rand(2, 1).astype(self.dtype)
self.out = self.x + self.y.reshape(2, 1, 1, 1)
def init_axis(self):
self.axis = 0
class TestElementwiseAddOp_rowwise_add_0(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(3, 4).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 3, 4)
def init_axis(self):
self.axis = 1
class TestFP16ElementwiseAddOp_rowwise_add_0(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 3, 4).astype(self.dtype)
self.y = np.random.rand(3, 4).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 3, 4)
def init_axis(self):
self.axis = 1
class TestElementwiseAddOp_rowwise_add_1(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 1).astype(self.dtype)
self.y = np.random.rand(1).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 1)
def init_axis(self):
self.axis = 1
class TestFP16ElementwiseAddOp_rowwise_add_1(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(2, 1).astype(self.dtype)
self.y = np.random.rand(1).astype(self.dtype)
self.out = self.x + self.y.reshape(1, 1)
def init_axis(self):
self.axis = 1
class TestElementwiseAddOp_channelwise_add(TestElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(3, 20, 20).astype(self.dtype)
self.y = np.random.rand(3, 1, 1).astype(self.dtype)
self.out = self.x + self.y
def init_axis(self):
self.axis = -1
class TestFP16ElementwiseAddOp_channelwise_add(TestFP16ElementwiseAddOp):
def init_input_output(self):
self.x = np.random.rand(3, 10, 20).astype(self.dtype)
self.y = np.random.rand(3, 1, 1).astype(self.dtype)
self.out = self.x + self.y
def init_axis(self):
self.axis = -1
if __name__ == '__main__':
unittest.main()
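
All of the broadcast_* cases above follow the same pattern: y is reshaped so that plain numpy addition reproduces what elementwise_add computes for the given axis. A small standalone sketch of the axis=1 case, mirroring TestElementwiseAddOp_broadcast_1 (illustration only, not part of the test file):

# Illustration: a length-3 vector broadcast along axis 1 of a (2, 3, 4) tensor.
import numpy as np

x = np.random.rand(2, 3, 4).astype(np.float32)
y = np.random.rand(3).astype(np.float32)

expected = x + y.reshape(1, 3, 1)   # what the op computes for axis=1
assert expected.shape == (2, 3, 4)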
avg_line_length: 32.254417 | max_line_length: 74 | alphanum_fraction: 0.654251

hexsha: 07fe621c9c42870e4074abda4015e3d67ea27a16 | size: 674 | ext: py | lang: Python
repo_path: app/core/management/commands/wait_for_db.py | repo_name: CasperAmandusJohansen/recipe-app-api | repo_head_hexsha: 22d299a332f0ce2803a0e20aa7dabab715583377 (same across stars/issues/forks columns)
licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null
import time
from django.db import connections
from django.db.utils import OperationalError
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Django command to pause execution until database is available
"""
def handle(self, *args, **options):
self.stdout.write('Waiting for database...')
db_conn = None
while not db_conn:
try:
db_conn = connections['default']
except OperationalError:
self.stdout.write('Database unavailable, waiting 1 sec')
time.sleep(1)
self.stdout.write(self.style.SUCCESS('Database available!'))
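
On the command line this is invoked as `python manage.py wait_for_db`. A hedged sketch of calling it programmatically before running migrations (the surrounding startup script is an assumption; call_command is the standard Django entry point for management commands):

# Hypothetical startup hook; assumes DJANGO_SETTINGS_MODULE is already set.
import django
from django.core.management import call_command

django.setup()
call_command("wait_for_db")   # blocks until the default connection is usable
call_command("migrate")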
avg_line_length: 30.636364 | max_line_length: 72 | alphanum_fraction: 0.645401

hexsha: c918570b4df2d3da5dcbcdab9bf1fd442b54a6ef | size: 2,772 | ext: py | lang: Python
repo_path: sensible-paste-up/sensible-paste-up.py | repo_name: catseye/NaNoGenLab | repo_head_hexsha: 3e4a7314e6023557856e1cc910e9d0edc4daf43c (same across stars/issues/forks columns)
licenses: ["Unlicense"]
max_stars_count: 20 (2015-06-05T14:02:12.000Z to 2021-11-02T22:19:18.000Z)
max_issues_count: 1 (2015-10-15T12:58:35.000Z to 2015-10-15T12:58:35.000Z)
max_forks_count: 1 (2021-04-08T23:50:06.000Z to 2021-04-08T23:50:06.000Z)
#!/usr/bin/env python
import math
from optparse import OptionParser
import os
import random
import sys
from PIL import Image
def get_luminance(image, rectangle):
    # not that I know what luminance means. we want to pick the
# "blankest" part of the canvas, is all. this is an attempt
region = image.crop(rectangle)
histogram = region.histogram()
# how many times does the very brightest pixel occur?
return histogram[-1]
# could be something more like
# histogram[-1] * 256 + histogram[-2] * 128 + histogram[-3] * 64 ...
def main(argv):
optparser = OptionParser(__doc__)
optparser.add_option("--width", default=1200,
help="width of destination canvas")
optparser.add_option("--height", default=2000,
help="height of destination canvas")
(options, args) = optparser.parse_args(argv[1:])
images = []
for filename in args:
images.append(Image.open(filename))
base_width = int(options.width)
base_height = int(options.height)
canvas = Image.new('L', (base_width, base_height), color=255)
def area(image):
return image.size[0] * image.size[1]
images.sort(lambda a, b: cmp(area(b), area(a)))
for image in images:
# make n trials to find a "good" place to put this.
# use the place with the best score.
best_score = 0
best_place = None
print image
desired_point = (base_width / 2, base_height / 2)
for trial in xrange(0, 100):
score = 0
try:
paste_point = (random.randint(0, base_width - image.size[0]),
random.randint(0, base_height - image.size[1]))
except ValueError:
print "uh-oh, won't fit?"
continue
lum = get_luminance(canvas, (
paste_point[0], paste_point[1],
paste_point[0] + image.size[0], paste_point[1] + image.size[1],
))
score = lum
# also factor in distance to desired point
distance = math.sqrt(
(paste_point[0] - desired_point[0]) ** 2 +
(paste_point[1] - desired_point[1]) ** 2
)
score -= distance # ??
if score > best_score:
best_score = score
best_place = paste_point
print "improved score", best_score
print "best score:", best_score, best_place
if best_place is None:
print "Could not find good place to paste!"
else:
canvas.paste(image, best_place)
canvas.save("output.png")
if __name__ == '__main__':
import sys
main(sys.argv)
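
The comment inside get_luminance hints at a weighted variant ("histogram[-1] * 256 + histogram[-2] * 128 + ..."). A sketch of that alternative scoring, written as a separate helper so it could be swapped in for get_luminance; this is my reading of the comment, not code from the repository:

# Hypothetical alternative: weight the brightest histogram buckets with
# geometrically decreasing weights instead of counting only the brightest one.
def get_weighted_luminance(image, rectangle, buckets=8):
    histogram = image.crop(rectangle).histogram()
    score = 0
    weight = 256
    for count in reversed(histogram[-buckets:]):  # brightest bucket first
        score += count * weight
        weight //= 2
    return score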
avg_line_length: 29.806452 | max_line_length: 79 | alphanum_fraction: 0.568182

hexsha: d32cc53a01f3099b2f9dd6106230edbba3224508 | size: 954 | ext: py | lang: Python
repo_path: alipay/aop/api/response/KoubeiItemModifyResponse.py | repo_name: snowxmas/alipay-sdk-python-all | repo_head_hexsha: 96870ced60facd96c5bce18d19371720cbda3317 (same across stars/issues/forks columns)
licenses: ["Apache-2.0"]
max_stars_count: 213 (2018-08-27T16:49:32.000Z to 2021-12-29T04:34:12.000Z)
max_issues_count: 29 (2018-09-29T06:43:00.000Z to 2021-09-02T03:27:32.000Z)
max_forks_count: 59 (2018-08-27T16:59:26.000Z to 2022-03-25T10:08:15.000Z)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class KoubeiItemModifyResponse(AlipayResponse):
def __init__(self):
super(KoubeiItemModifyResponse, self).__init__()
self._item_id = None
self._request_id = None
@property
def item_id(self):
return self._item_id
@item_id.setter
def item_id(self, value):
self._item_id = value
@property
def request_id(self):
return self._request_id
@request_id.setter
def request_id(self, value):
self._request_id = value
def parse_response_content(self, response_content):
response = super(KoubeiItemModifyResponse, self).parse_response_content(response_content)
if 'item_id' in response:
self.item_id = response['item_id']
if 'request_id' in response:
self.request_id = response['request_id']
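
A minimal sketch of the plain property interface this class exposes; the identifier values below are made-up placeholders, not real gateway data.

# Hypothetical illustration of setting and reading the two response fields.
resp = KoubeiItemModifyResponse()
resp.item_id = "ITEM_0001"
resp.request_id = "REQ_0001"
print(resp.item_id, resp.request_id)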
avg_line_length: 26.5 | max_line_length: 97 | alphanum_fraction: 0.674004

hexsha: 94b700b52e231acc3112ed925a477cfaa49a9645 | size: 1,051 | ext: py | lang: Python
repo_path: data/nlp_importer.py | repo_name: normangilmore/TextThresher | repo_head_hexsha: b5919389dd6a2eb1ce084cb6262e9ff7a06cc751 (same across stars/issues/forks columns)
licenses: ["Apache-2.0"]
max_stars_count: 21 (2016-08-01T23:42:00.000Z to 2020-08-06T17:41:31.000Z)
max_issues_count: 38 (2016-08-02T01:07:57.000Z to 2017-10-27T22:48:31.000Z)
max_forks_count: 11 (2016-08-03T22:31:59.000Z to 2021-12-28T18:35:49.000Z)
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "thresher_backend.settings")
import django
django.setup()
import json
from thresher.models import NLPHints
from data.nlp_hint_types import QUESTION_TO_HINT_TYPE
def nlp_load(annotations):
resultList = json.loads(annotations)
for result in resultList:
article_id=result['article_id']
for hint in result['Hints']:
question_id = hint['qID']
hint_type = QUESTION_TO_HINT_TYPE[question_id]
highlightList = hint['Highlights']
offsetList = hint['Indices']
# Store the text after its offset pair to make a triplet, e.g.:
# [16,22,"Denver"]
# You could argue that an object would be better style. Oh well.
for i in range(len(offsetList)):
offsetList[i].append(highlightList[i])
NLPHints.objects.create(
article_id = article_id,
hint_type = hint_type,
offsets = offsetList
)
return True
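
From the way nlp_load walks the parsed JSON, the annotations argument is a JSON array of per-article results, each with an 'article_id' and a list of 'Hints' whose 'Indices' and 'Highlights' are parallel lists. A hedged sketch of a matching payload (the ids and text are invented):

# Hypothetical payload matching the structure nlp_load expects; the qID used
# here must exist as a key in QUESTION_TO_HINT_TYPE for the lookup to succeed.
import json

annotations = json.dumps([
    {
        "article_id": 42,
        "Hints": [
            {
                "qID": 1,
                "Highlights": ["Denver"],
                "Indices": [[16, 22]],
            }
        ],
    }
])
# nlp_load(annotations)  # would create one NLPHints row per hint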
avg_line_length: 32.84375 | max_line_length: 76 | alphanum_fraction: 0.627022