from numpy import random as r
class Dice(object):
"""
Simulate the rolling of a dice
Parameters
----------
NONE
Attributes
----------
n_rolls_ : int
Number of rolls for the dice
result_ : array, shape = [2]
Most recent outcome of the roll of two dice
total_ : int
Sum of dice outcome
"""
def __init__(self):
self.n_rolls = 0
def roll(self):
self.n_rolls += 1
self.result = r.randint(1, 7, size=2)
self.total = sum(self.result)
def fixed_roll(self, outcome):
self.n_rolls += 1
self.result = outcome
self.total = sum(self.result)
if __name__ == "__main__":
d1 = Dice()
d1.roll()
d1.roll()
d1.roll()
print("Number of rolls: {}".format(d1.n_rolls))
print("Last Roll: {}".format(d1.result))
print("Last Roll Total: {}".format(d1.total))
|
# -*- coding: UTF-8 -*-
# =========================================================================
# Copyright (C) 2017 Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import unicode_literals
import sys
from .base import BaseCommand
from ..utils import get_current_time
class PresignCommand(BaseCommand):
command = "presign"
usage = "%(prog)s <qs-path> [-e <expire_seconds>]"
@classmethod
def add_extra_arguments(cls, parser):
parser.add_argument(
"qs_path",
nargs="?",
default="qs://",
help="The qs-path to presign"
)
parser.add_argument(
"-e",
"--expire",
dest="expire_seconds",
type=int,
default=3600,
help="The number of seconds until the pre-signed URL expires."
)
return parser
@classmethod
def generate_presign_url(cls):
bucket, prefix = cls.validate_qs_path(cls.options.qs_path)
if prefix == "":
cls.uni_print("Error: please specify object in qs-path")
sys.exit(-1)
resp = cls.current_bucket.head_object(prefix)
# Handle common errors
if resp.status_code == 404:
cls.uni_print("Error: Please check if object <%s> exists" % prefix)
sys.exit(-1)
if resp.status_code == 403:
cls.uni_print(
"Error: Please check if you have enough"
" permission to access object <%s>." % prefix
)
sys.exit(-1)
if resp.status_code != 200:
cls.uni_print(resp.content)
sys.exit(-1)
is_public = False
# check whether the bucket is public
current_acl = cls.current_bucket.get_acl()
if current_acl.status_code == 200:
for v in current_acl["acl"]:
if v["grantee"]["name"] == "QS_ALL_USERS":
is_public = True
if is_public:
public_url = "{protocol}://{bucket_name}.{zone}.{host}/{object_key}".format(
protocol=cls.current_bucket.config.protocol,
bucket_name=bucket,
zone=cls.bucket_map[bucket],
host=cls.current_bucket.config.host,
object_key=prefix
)
cls.uni_print(public_url)
return public_url
else:
# if the bucket is non-public, generate the link with signature,
# expire seconds and other formatted parameters
prepared = cls.current_bucket.get_object_request(prefix).sign_query(
get_current_time() + cls.options.expire_seconds
)
cls.uni_print(prepared.url)
return prepared.url
@classmethod
def send_request(cls):
cls.generate_presign_url()
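# Illustrative invocation (hedged; assumes a qsctl-style entry point that
# registers this command):
#   $ qsctl presign qs://mybucket/path/to/object -e 600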
|
from abc import ABC, abstractmethod
from pydantic import BaseModel, create_model
from typing_extensions import Literal
from rastervision.v2.core import _registry
def register_config(type_hint, version=0, upgraders=None):
def _register_config(cls):
    if version > 0:
        # Validate the upgraders before building the model so the error
        # surfaces before any class is created.
        if upgraders is None or len(upgraders) != version:
            raise ValueError(
                'If version > 0, must supply list of upgraders with length'
                ' equal to version.')
        cls = create_model(
            cls.__name__,
            version=(Literal[version], version),
            type_hint=(Literal[type_hint], type_hint),
            __base__=cls)
    else:
        cls = create_model(
            cls.__name__,
            type_hint=(Literal[type_hint], type_hint),
            __base__=cls)
_registry.add_config(
type_hint, cls, version=version, upgraders=upgraders)
return cls
return _register_config
def build_config(x):
if isinstance(x, dict):
new_x = {}
for k, v in x.items():
new_x[k] = build_config(v)
type_hint = new_x.get('type_hint')
if type_hint is not None:
config_cls = _registry.get_config(type_hint)
new_x = config_cls(**new_x)
return new_x
elif isinstance(x, list):
return [build_config(v) for v in x]
else:
return x
def upgrade_config(x):
if isinstance(x, dict):
new_x = {}
for k, v in x.items():
new_x[k] = upgrade_config(v)
type_hint = new_x.get('type_hint')
if type_hint is not None:
version = new_x.get('version')
if version is not None:
curr_version, upgraders = _registry.get_config_upgraders(
type_hint)
for upgrader in upgraders[version:]:
new_x = upgrader.upgrade(new_x)
new_x['version'] = curr_version
return new_x
elif isinstance(x, list):
return [upgrade_config(v) for v in x]
else:
return x
class Upgrader(ABC):
@abstractmethod
def upgrade(self, cfg_dict):
pass
class Config(BaseModel):
class Config:
extra = 'forbid'
def update(self):
pass
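# A hedged round-trip sketch (hypothetical names): registering a Config
# subclass under a type_hint lets build_config rehydrate it from a plain dict.
#
# @register_config('my_config')
# class MyConfig(Config):
#     rate: float = 0.1
#
# cfg = build_config({'type_hint': 'my_config', 'rate': 0.5})
# assert isinstance(cfg, MyConfig) and cfg.rate == 0.5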
|
import setuptools
import milli_piyango
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name=milli_piyango.__name__,
version=milli_piyango.__version__,
author="M. Olcay TERCANLI",
author_email="molcaytercanli@gmail.com",
description="A package for getting lottery data from mpi.gov.tr(Turkish Lottery)",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/molcay/milli-piyango",
packages=setuptools.find_packages(),
install_requires=['requests'],
classifiers=[
    "Programming Language :: Python :: 3.6",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
],
)
|
import pandas
from Bio import SeqIO  # Bio.SeqIO is not reachable via a bare "import Bio"
lengths = map(len, SeqIO.parse('/home/jit/Downloads/setu/see_long/mapped.long.fasta', 'fasta'))
pandas.Series(lengths).hist(color='gray', bins=1000)
|
# BEGIN-SCRIPT-BLOCK
#
# Script-Filter:
# $vendor eq "Cisco" and $model like /ASA/
#
# END-SCRIPT-BLOCK
from infoblox_netmri.easy import NetMRIEasy
# These values will be provided by NetMRI before execution
defaults = {
"api_url": api_url,
"http_username": http_username,
"http_password": http_password,
"job_id": job_id,
"device_id": device_id,
"batch_id": batch_id
}
# Create NetMRI context manager. It will close the session after execution
with NetMRIEasy(**defaults) as easy:
# Everything has to be indented under the context manager if we are sending commands to NetMRI
# First I want to collect the interface the existing snmp-host is using.
# I only show a specific snmp-host to make it easier to parse the output.
# I split each line on whitespace and take the seventh token (index 6);
# this works for any interface name length as long as the name contains no spaces.
parse = easy.send_command('show snmp-server host')
lines = parse.splitlines()
# Now let's use the variable 'interface' to run the commands to remove the existing Collectors single line entries
# and then create the new objects and the new Collectors snmp-host entries.
# I also show what it looked like before I change it and then after the changes have been applied.
for line in lines:
    my_output = line.split()
    # skip header or blank lines that don't contain a full snmp-server host entry
    if len(my_output) < 7:
        continue
    interface = my_output[6]
    scr_ip = my_output[3]
    # we strip the trailing ',' off with rstrip
    ip = scr_ip.rstrip(',')
easy.send_command('conf t')
easy.send_command('no snmp-server host {} {}'.format(interface,ip))
easy.send_command('object network Collectors_1')
easy.send_command('range 10.10.10.84 10.10.10.104')
easy.send_command('object network Collectors_2')
easy.send_command('range 10.10.12.66 10.10.12.89')
easy.send_command('snmp-server host-group {} Collectors_1 community infoblox version 2c' .format(interface))
easy.send_command('snmp-server host-group {} Collectors_2 community infoblox version 2c' .format(interface))
easy.send_command('end')
easy.send_command('wri mem')
easy.send_command('show snmp-server host')
easy.send_command('term p 24')
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=wildcard-import,unused-wildcard-import
"""Compat shim for backwards compatability with qiskit.util."""
# The 'qiskit._util' module is deprecated and has been renamed
# 'qiskit.util'. Please update your imports as 'qiskit._util'
# will be removed in Qiskit Terra 0.9.
from qiskit.util import *
|
class Solution:
    def firstMissingPositive(self, nums):
        # Cyclic sort: put every value v with 1 <= v <= len(nums) at index v - 1.
        for i in range(len(nums)):
            while 1 <= nums[i] <= len(nums) and nums[nums[i] - 1] != nums[i]:
                tmp = nums[i] - 1
                nums[i], nums[tmp] = nums[tmp], nums[i]
        # The first index holding the wrong value reveals the missing positive.
        for i, v in enumerate(nums):
            if v != i + 1:
                return i + 1
        return len(nums) + 1
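# Quick sanity checks for the cyclic-sort approach (illustrative only):
# Solution().firstMissingPositive([3, 4, -1, 1])  # -> 2
# Solution().firstMissingPositive([1, 2, 0])      # -> 3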
|
import Pyro4
class Client:
def __init__(self, host, port, identifier = "main-"):
self.host = host
self.port = port
self.identifier = identifier
self.objects = dict()
def Start(self, remoteObjects):
for obj in remoteObjects:
url = "PYRONAME:%s%s@%s:%d" % (self.identifier, obj, self.host, self.port)
self.objects[obj] = Pyro4.Proxy(url)
def GetObject(self, name):
return self.objects[name]
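# A hedged usage sketch (hypothetical object name); the Pyro name server is
# expected to hold an entry like "main-calculator":
# client = Client("localhost", 9090)
# client.Start(["calculator"])
# calc = client.GetObject("calculator")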
|
import json
import os
import random
import scipy.io
from collections import defaultdict
class BasicDataProvider:
def __init__(self, dataset):
print ('Initializing data provider for dataset %s...' % (dataset, ))
# !assumptions on folder structure
self.dataset_root = os.path.join('data', dataset)
self.image_root = os.path.join('data', dataset, 'imgs')
# load the dataset into memory
dataset_path = os.path.join(self.dataset_root, 'dataset.json')
print ('BasicDataProvider: reading %s' % (dataset_path, ))
self.dataset = json.load(open(dataset_path, 'r'))
# load the image features into memory
features_path = os.path.join(self.dataset_root, 'vgg_feats.mat')
print ('BasicDataProvider: reading %s' % (features_path, ))
features_struct = scipy.io.loadmat(features_path)
self.features = features_struct['feats']
# group images by their train/val/test split into a dictionary -> list structure
self.split = defaultdict(list)
for img in self.dataset['images']:
self.split[img['split']].append(img)
# "PRIVATE" FUNCTIONS
# in future we may want to create copies here so that we don't touch the
# data provider class data, but for now lets do the simple thing and
# just return raw internal img sent structs. This also has the advantage
# that the driver could store various useful caching stuff in these structs
# and they will be returned in the future with the cache present
def _getImage(self, img):
""" create an image structure for the driver """
# lazily fill in some attributes
if 'local_file_path' not in img:
    img['local_file_path'] = os.path.join(self.image_root, img['filename'])
if 'feat' not in img:  # also fill in the features
    feature_index = img['imgid']  # NOTE: imgid is an integer, and it indexes into features
    img['feat'] = self.features[:, feature_index]
return img
def _getSentence(self, sent):
""" create a sentence structure for the driver """
# NOOP for now
return sent
# PUBLIC FUNCTIONS
def getSplitSize(self, split, ofwhat = 'sentences'):
""" return size of a split, either number of sentences or number of images """
if ofwhat == 'sentences':
return sum(len(img['sentences']) for img in self.split[split])
else: # assume images
return len(self.split[split])
def sampleImageSentencePair(self, split = 'train'):
""" sample image sentence pair from a split """
images = self.split[split]
img = random.choice(images)
sent = random.choice(img['sentences'])
out = {}
out['image'] = self._getImage(img)
out['sentence'] = self._getSentence(sent)
return out
def iterImageSentencePair(self, split = 'train', max_images = -1):
for i,img in enumerate(self.split[split]):
if max_images >= 0 and i >= max_images: break
for sent in img['sentences']:
out = {}
out['image'] = self._getImage(img)
out['sentence'] = self._getSentence(sent)
yield out
def iterImageSentencePairBatch(self, split = 'train', max_images = -1, max_batch_size = 100):
batch = []
for i,img in enumerate(self.split[split]):
if max_images >= 0 and i >= max_images: break
for sent in img['sentences']:
out = {}
out['image'] = self._getImage(img)
out['sentence'] = self._getSentence(sent)
batch.append(out)
if len(batch) >= max_batch_size:
yield batch
batch = []
if batch:
yield batch
def iterSentences(self, split = 'train'):
for img in self.split[split]:
for sent in img['sentences']:
yield self._getSentence(sent)
def iterSentences_train_val(self):
data_train = self.split['train']
data_val = self.split['restval']
data_merge = data_train + data_val
for img in data_merge:
for sent in img['sentences']:
yield self._getSentence(sent)
def iterImages(self, split = 'train', shuffle = False, max_images = -1):
imglist = self.split[split]
ix = list(range(len(imglist)))  # list() so random.shuffle works on Python 3
if shuffle:
random.shuffle(ix)
if max_images > 0:
ix = ix[:min(len(ix),max_images)] # crop the list
for i in ix:
yield self._getImage(imglist[i])
def getDataProvider(dataset):
""" we could intercept a special dataset and return different data providers """
assert dataset in ['flickr8k', 'flickr30k', 'coco'], 'dataset %s unknown' % (dataset, )
return BasicDataProvider(dataset)
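# Illustrative use (assumes data/flickr8k/dataset.json and vgg_feats.mat exist
# on disk; the 'raw' sentence key is an assumption about the dataset format):
# dp = getDataProvider('flickr8k')
# pair = dp.sampleImageSentencePair('train')
# print(pair['sentence'].get('raw'), pair['image']['local_file_path'])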
|
import torch
from torch import nn
from torch.nn import functional as F
class IDRLoss(nn.Module):
def __init__(self, idr_rgb_weight, sg_rgb_weight, eikonal_weight, mask_weight, alpha,
r_patch=-1, normalsmooth_weight=0., loss_type='L1'):
super().__init__()
self.idr_rgb_weight = idr_rgb_weight
self.sg_rgb_weight = sg_rgb_weight
self.eikonal_weight = eikonal_weight
self.mask_weight = mask_weight
self.alpha = alpha
if loss_type == 'L1':
print('Using L1 loss for comparing images!')
self.img_loss = nn.L1Loss(reduction='mean')
elif loss_type == 'L2':
print('Using L2 loss for comparing images!')
self.img_loss = nn.MSELoss(reduction='mean')
else:
raise Exception('Unknown loss_type!')
self.r_patch = int(r_patch)
self.normalsmooth_weight = normalsmooth_weight
print('Patch size in normal smooth loss: ', self.r_patch)
def get_rgb_loss(self, idr_rgb_values, sg_rgb_values, rgb_gt, network_object_mask, object_mask):
mask = network_object_mask & object_mask
if mask.sum() == 0:
return torch.tensor(0.0).cuda().float(), torch.tensor(0.0).cuda().float()
idr_rgb_values = idr_rgb_values[mask].reshape((-1, 3))
sg_rgb_values = sg_rgb_values[mask].reshape((-1, 3))
rgb_gt = rgb_gt.reshape(-1, 3)[mask].reshape((-1, 3))
idr_rgb_loss = self.img_loss(idr_rgb_values, rgb_gt)
sg_rgb_loss = self.img_loss(sg_rgb_values, rgb_gt)
return idr_rgb_loss, sg_rgb_loss
def get_eikonal_loss(self, grad_theta):
if grad_theta.shape[0] == 0:
return torch.tensor(0.0).cuda().float()
eikonal_loss = ((grad_theta.norm(2, dim=1) - 1) ** 2).mean()
return eikonal_loss
def get_mask_loss(self, sdf_output, network_object_mask, object_mask):
mask = ~(network_object_mask & object_mask)
if mask.sum() == 0:
return torch.tensor(0.0).cuda().float()
sdf_pred = -self.alpha * sdf_output[mask]
gt = object_mask[mask].float()
mask_loss = (1 / self.alpha) * F.binary_cross_entropy_with_logits(sdf_pred.squeeze(-1), gt, reduction='sum') / float(object_mask.shape[0])
return mask_loss
def get_normalsmooth_loss(self, normal, network_object_mask, object_mask):
    # Check the cheap conditions first so an invalid r_patch never reaches the reshape.
    if self.r_patch < 1 or self.normalsmooth_weight == 0.:
        return torch.tensor(0.0).cuda().float()
    mask = (network_object_mask & object_mask).reshape(-1, 4*self.r_patch*self.r_patch).all(dim=-1)
    if mask.sum() == 0:
        return torch.tensor(0.0).cuda().float()
normal = normal.view((-1, 4*self.r_patch*self.r_patch, 3))
return torch.mean(torch.var(normal, dim=1)[mask])
def forward(self, model_outputs, ground_truth):
rgb_gt = ground_truth['rgb'].cuda()
network_object_mask = model_outputs['network_object_mask']
object_mask = model_outputs['object_mask']
idr_rgb_loss, sg_rgb_loss = self.get_rgb_loss(model_outputs['idr_rgb_values'], model_outputs['sg_rgb_values'],
rgb_gt, network_object_mask, object_mask)
mask_loss = self.get_mask_loss(model_outputs['sdf_output'], network_object_mask, object_mask)
eikonal_loss = self.get_eikonal_loss(model_outputs['grad_theta'])
normalsmooth_loss = self.get_normalsmooth_loss(model_outputs['normal_values'], network_object_mask, object_mask)
loss = self.idr_rgb_weight * idr_rgb_loss + \
self.sg_rgb_weight * sg_rgb_loss + \
self.eikonal_weight * eikonal_loss + \
self.mask_weight * mask_loss + \
self.normalsmooth_weight * normalsmooth_loss
return {
'loss': loss,
'idr_rgb_loss': idr_rgb_loss,
'sg_rgb_loss': sg_rgb_loss,
'eikonal_loss': eikonal_loss,
'mask_loss': mask_loss,
'normalsmooth_loss': normalsmooth_loss
}
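# A hedged construction sketch; the weights below are illustrative, not taken
# from any particular experiment config:
# criterion = IDRLoss(idr_rgb_weight=1.0, sg_rgb_weight=1.0,
#                     eikonal_weight=0.1, mask_weight=100.0, alpha=50.0)
# losses = criterion(model_outputs, ground_truth)  # dicts with the keys used in forward()
# losses['loss'].backward()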
|
from . import id_request
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#- Author : (DEK) Devendra Kavthekar
# Write a program which uses filter() to make a list whose elements are
# the even numbers between 1 and 20 (both included).
# Hints:
# Use filter() to filter elements of a list.
# Use lambda to define anonymous functions.
def do(start_number, end_number):
    # avoid shadowing the built-in 'list'
    numbers = [value for value in range(start_number, end_number + 1)]
    print 'List :', numbers
    result = filter(lambda number: number % 2 == 0, numbers)
    print 'Even Number From List :', result
def main():
do(int(raw_input('Enter Starting Number :')),
int(raw_input('Enter Ending Number :')))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
___ ___ __ ___ __ __ __
/' __` __`\ /'__`\ / __`\/\ \/\ \/\ \\
/\ \/\ \/\ \/\ __//\ \L\ \ \ \_/ \_/ \\
\ \_\ \_\ \_\ \____\ \____/\ \___x___/'
\/_/\/_/\/_/\/____/\/___/ \/__//__/
Editor-agnostic markdown live preview server.
Usage: meow [options] FILE
Options:
-q, --quiet Quiet mode.
-p port, --port=port Server port. [default: 7777]
-o file, --output=file Export to HTML mode.
-t type, --filetype=type Force specify file type.
--debug Output verbose debug logs.
"""
import logging
import os
import sys
from docopt import docopt
from ._version import __version__
from .meow import quickstart, export_html, Markup, SUPPORT_FILETYPE
def open_local_url(port):
import webbrowser
logging.debug('opening web browser...')
# from ipdb import set_trace; set_trace()
local_url = 'http://127.0.0.1:%d' % port
webbrowser.open(local_url)
def usage(markdown_file):
_, ext = os.path.splitext(markdown_file)
print('"%s" is unsupported markup type.' % ext)
print('\n[SUPPORT FILETYPE]')
for k, v in SUPPORT_FILETYPE.items():
print(' %s: %s' % (k, v['prefix']))
print('\n[REQUIREMENT MODULE]')
for k, v in SUPPORT_FILETYPE.items():
print(' %s: %s' % (k, v['module']))
def main():
args = docopt(__doc__, version=__version__)
markdown_file = os.path.abspath(args['FILE'])
output_file = args['--output']
if output_file is not None:
output_file = os.path.abspath(output_file)
# export HTML mode
if output_file is not None:
export_html(markdown_file, output_file)
return
# logging configs; shut-your-mouth-up mode takes precedence, otherwise the
# quiet setting would be silently overwritten below
if args['--quiet']:
    logging_level = logging.ERROR
    use_debug = False
elif args['--debug']:
    logging_level = logging.DEBUG
    use_debug = True
else:
    logging_level = logging.INFO
    use_debug = False
filetype = args['--filetype']
logging.basicConfig(level=logging_level,
format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
try:
    port = int(args['--port'])
except (TypeError, ValueError):
    sys.stderr.write('Invalid port number\n')
    sys.exit(-1)
# check filetype
try:
    Markup.ftdetect(markdown_file)
except ValueError as e:
    # ValueError has no .message attribute on Python 3; compare str(e) instead
    if str(e) == "unsupported markup":
        usage(markdown_file)
        return
    raise
if filetype and not Markup.has_filetype_module(filetype):
usage("dummy." + filetype)
return
# try to open a browser; this is best-effort, so failures are ignored
try:
    open_local_url(port)
except Exception:
    pass
print('Preview on http://127.0.0.1:%d' % port)
print('Hit Ctrl-C to quit.')
# start server
quickstart(markdown_file, port=port, debug=use_debug, filetype=filetype)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import numpy
import pandas
from copy import copy, deepcopy
import rpy2.robjects as ro
from rpy2.robjects import r, pandas2ri, numpy2ri
from rpy2.robjects.conversion import localconverter
pandas2ri.activate()
numpy2ri.activate()
r.library("lme4")
class lmer(object):
"""
Mixed effect regression
Wrapper around lme4's lmer class to enable:
- training
- predicting
"""
# Class wide constants/parameters
def __init__(
    self,
    target = None,
    fixed_effects = None,
    re_features = None,
    re_terms = None,
):
    # Target
    self.target = target
    # Exhaustive list of fixed effects (used to prepare the DataFrame).
    # Defaults are created per instance to avoid the shared-mutable-default pitfall.
    self.fixed_effects = fixed_effects if fixed_effects is not None else []
    # Exhaustive list of random-effect features, including those used for
    # intercept and/or slope as well as nested terms.
    self.re_features = re_features if re_features is not None else []
    # List of random-effect terms of the form (1|RE), (FE|RE), (1|RE1/RE2), or (FE|RE1/RE2)
    self.re_terms = re_terms if re_terms is not None else []
# Training Dataset
self.df_train = None
# R DataFrame Names
self.r_df_train_name = None
self.r_df_predict_name = None
# Fitted Model Name
self.model_name = None
# Mixed Effects Formula
self.formula = None
# R strings
self.r_train_string = None
self.r_predict_string = None
# Fitted Model parameters
self.fe_coefficients = None
self.fe_params = None
self.re_params = None
# Predictions DataFrame
self.df_preds = None
def __copy__(self):
cls = self.__class__
result = cls.__new__(cls)
result.__dict__.update(self.__dict__)
return result
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
def import_clean_trainSet(self, df_train, verbose = False):
"""
"""
self.df_train = df_train
self.df_train = self.df_train[['year'] + self.fixed_effects + self.re_features + [self.target]]
if verbose:
print(self.df_train.tail())
@staticmethod
def pandas2R(r_df_name, df, verbose = False):
"""
Handing over pandas to R
:r_df_name: Name of the DataFrame in R
:df: Pandas DataFrame
"""
with localconverter(ro.default_converter + pandas2ri.converter):
ro.globalenv[r_df_name] = ro.conversion.py2rpy(df)
def prep_R_training(self,
prefix_df = 'r_df_train',
verbose = False):
"""
Method for generating the string for lmer mixed effect model.
Default is random intercept (no random slopes)
:re_terms: list of random effect terms
:returns: concatenated model string with the correct syntax.
"""
# Compose the DataFrame name as used in R
self.r_df_train_name = prefix_df
# Handing over pandas to R
self.pandas2R(self.r_df_train_name, self.df_train, verbose=False)
# Compose the model name as used in R
self.model_name = 'lm1'
# Compose the model formula
self.formula = 'cbind(' + self.target + ") ~ "
first_term = True
for i, f in enumerate(self.fixed_effects):
if first_term:
self.formula += f
first_term = False
else:
self.formula += " + " + f
for i, r in enumerate(self.re_terms):
if first_term:
self.formula += r
first_term = False
else:
self.formula += " + " + r
# Compose the model string
self.r_train_string = self.model_name + ' <- lmer("' + self.formula + '",data=' + self.r_df_train_name + ')'
if verbose:
print(self.r_df_train_name)
print(self.model_name)
print(self.formula)
print(self.r_train_string)
@staticmethod
def fe2df(model_name):
"""
R Fixed effects parameters to Pandas DataFrame
"""
fe_coefficients = pandas.DataFrame([r('fixef(' + model_name + ')')],
index=['Estimate'],
columns=r('names(fixef(' + model_name + '))')
)
fe_params = fe_coefficients.loc[['Estimate']]
return fe_coefficients, fe_params
@staticmethod
def re2df(r_ranef):
"""
R Random effect parameters to Pandas DataFrame
r_ranef: r('ranef(lm1)')
"""
re_params = {}
for i, re_name in enumerate(r_ranef.names):
with localconverter(ro.default_converter + pandas2ri.converter):
re_params[re_name] = ro.conversion.rpy2py(r_ranef[i])
re_params[re_name] = re_params[re_name].reset_index()
re_params[re_name] = re_params[re_name].rename(columns={'index': re_name})
return re_params
def train_lmer(self, verbose=True):
"""
Fit the model using R lmer function
"""
if verbose:
print(r(self.r_train_string))
else:
r(self.r_train_string)
# Get the fixed-effect parameters
self.fe_coefficients, self.fe_params = self.fe2df(self.model_name)
# Get the random-effect parameters
self.re_params = self.re2df(r('ranef(' + self.model_name + ')'))
def predict_lmer(self, df_predict, prefix_df = 'r_df_predict'):
"""
Predict using the lmer function
:df_predict: DataFrame with the data for the prediction
"""
#self.df_predict = df_predict
# Compose the DataFrame name as used in R
self.r_df_predict_name = prefix_df
# Handing over pandas to R
self.pandas2R(self.r_df_predict_name, df_predict, verbose=False)
# Compose the r_string
self.r_predict_string = 'predict(' + self.model_name + ', newdata=' +\
self.r_df_predict_name + ', allow.new.levels=TRUE)'
# Run the prediction
self.df_preds = pandas.DataFrame(r(self.r_predict_string))
# Convert to probabilities and either-or predictions
self.df_preds = self.df_preds.rename(columns={0:'preds'})
# Concatenate prediction DataFrame
#self.df_preds = pandas.concat([df_predict, self.df_preds], axis=1)
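# A hedged end-to-end sketch (hypothetical column names; the training
# DataFrame must contain a 'year' column, as import_clean_trainSet assumes):
# model = lmer(target='y', fixed_effects=['x1'],
#              re_features=['group'], re_terms=['(1|group)'])
# model.import_clean_trainSet(df_train)
# model.prep_R_training(verbose=True)   # formula: cbind(y) ~ x1 + (1|group)
# model.train_lmer()
# model.predict_lmer(df_new)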
|
def benchmark(func_to_decorate):
    import time
    def wrapper(*args, **kwargs):
        # time the wrapped call and pass its return value through
        start = time.time()
        result = func_to_decorate(*args, **kwargs)
        end = time.time()
        print(f"Time: {end - start}")
        return result
    return wrapper
@benchmark
def fetch_web_page():
import requests
webpage = requests.get("https://google.com")
print(webpage)
# fetch_web_page = benchmark(fetch_web_page)
# fetch_web_page()
def set_arguments(func_to_decorate):
import functools
@functools.wraps(func_to_decorate)
def wrapper(arg1, arg2):
print("Set arguments here", arg1, arg2)
func_to_decorate(arg1, arg2)
return wrapper
@set_arguments
def print_full_name(first, second):
print("My name is:", first, second)
print_full_name("Alexander", "Mustache")
print(print_full_name)
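# Because of functools.wraps, the decorated function keeps its metadata:
# print(print_full_name.__name__)  # -> 'print_full_name' rather than 'wrapper'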
|
import urllib3
from appwrite.client import Client
from appwrite.services.users import Users
from appwrite.services.database import Database
from appwrite.services.storage import Storage
import datetime
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Helper method to print green colored output.
def print_green(prt):
print("\033[32;1m"+str(prt)+"\033[0m")
# Config
# Read the docs at https://appwrite.io/docs to get more information
# about API keys and Project IDs
ENDPOINT = 'https://localhost/v1'
PROJECT_ID = '<Project ID>'
API_KEY = '<Secret API Key>'
client = Client()
client.set_endpoint(ENDPOINT)
client.set_project(PROJECT_ID)
client.set_key(API_KEY)
collectionId = None
userId = None
# API Calls
# - api.create_collection
# - api.list_collection
# - api.add_doc
# - api.list_doc
# - api.upload_file
# - api.list_files
# - api.delete_file
# - api.create_user
# - api.list_user
# List of API definitions
def create_collection():
global collectionId
database = Database(client)
print_green("Running Create Collection API")
response = database.create_collection(
'Movies',
['*'],
['*'],
[
{'label': "Name", 'key': "name", 'type': "text",
'default': "Empty Name", 'required': True, 'array': False},
{'label': 'release_year', 'key': 'release_year', 'type': 'numeric',
'default': 1970, 'required': True, 'array': False}
]
)
collectionId = response['$id']
print(response)
def list_collection():
database = Database(client)
print_green("Running List Collection API")
response = database.list_collections()
collection = response['collections'][0]
print(collection)
def add_doc():
database = Database(client)
print_green("Running Add Document API")
response = database.create_document(
collectionId,
{
'name': "Spider Man",
'release_year': 1920,
},
['*'],
['*']
)
print(response)
def list_doc():
database = Database(client)
print_green("Running List Document API")
response = database.list_documents(collectionId)
print(response)
def upload_file():
    storage = Storage(client)
    print_green("Running Upload File API")
    response = storage.create_file(
        open("./nature.jpg", 'rb'),
        [],
        []
    )
    print(response)
def list_files():
storage = Storage(client)
print_green("Running List Files API")
result = storage.list_files()
file_count = result['sum']
print("Total number of files {} ".format(file_count))
files = result['files']
print(files)
def delete_file():
storage = Storage(client)
print_green("Running Delete File API")
result = storage.list_files()
first_file_id = result['files'][0]['$id']
response = storage.delete_file(first_file_id)
print(response)
def create_user(email, password, name):
global userId
users = Users(client)
print_green("Running Create User API")
response = users.create(
email,
password,
name
)
userId = response['$id']
print(response)
def list_user():
users = Users(client)
print_green("Running List User API")
response = users.list()
print(response)
def run_all_tasks():
name = str(datetime.datetime.now()).split()[0]
create_collection()
list_collection()
add_doc()
list_doc()
upload_file()
list_files()
delete_file()
create_user(
name + '@test.com',
name + '@123',
name
)
list_user()
if __name__ == "__main__":
run_all_tasks()
print_green("Successfully ran playground!")
|
# __init__.py
# ALS 2017/05/11
__all__ = ['sdssobj']
from . import sdssobj
import imp
imp.reload(sdssobj)
from .sdssobj import sdssObj
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add url and reference_url columns
Create Date: 2016-05-13 13:51:06.534663
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '47bf3f1f9be8'
down_revision = '17ae137bda7a'
def upgrade():
"""Add url and reference_url columns"""
op.add_column("risks",
sa.Column("url", sa.String(length=250),
nullable=True))
op.add_column("risks",
sa.Column("reference_url", sa.String(length=250),
nullable=True))
def downgrade():
    """Remove url and reference_url columns"""
    op.drop_column("risks", "url")
    op.drop_column("risks", "reference_url")
|
# -*- coding:utf-8 -*-
import sys
import os
import argparse
import numpy as np
from PIL import Image, ImageEnhance, ImageOps, ImageFile
# C:\Users\14542\Desktop\t
def main(args):
images_inputpath = os.path.join(args.input_dir, 'val_img')
gtpath_inputpath = os.path.join(args.input_dir, 'val_gt')
files = os.listdir(images_inputpath)
txt = open('val.txt', 'a')
for file in files:
    # avoid shadowing the built-in 'str'; add a newline so entries don't run together
    line = '%s %s\n' % (os.path.join(images_inputpath, file), os.path.join(gtpath_inputpath, file))
    print(line.strip())
    txt.write(line)
txt.close()
'''
def main(args):
dataset= []
images_inputpath=os.path.join(args.input_dir,'images')
gtpath_inputpath=os.path.join(args.input_dir,'gt')
images_outputpath = os.path.join(args.output_dir, 'images')
gtpath_outputpath = os.path.join(args.output_dir, 'gt')
files = os.listdir(images_inputpath)
for file in files:
file_name, file_extension=os.path.splitext(file)
img = Image.open(os.path.join(images_inputpath,file))
gt = Image.open(os.path.join(gtpath_inputpath,file))
for i in range(150):
cropped_img, cropped_gt = randomCrop(img, gt)
output_img_n = "{}_{}{}".format(file_name, i, file_extension)
cropped_img.save(os.path.join(images_outputpath,output_img_n))
cropped_gt.save(os.path.join(gtpath_outputpath,output_img_n))
'''
def randomCrop(image, gt):
    """
    Randomly crop the image: considering the image size range (68, 68),
    use a window larger than (36*36) to take the crop.
    :param image: PIL image
    :return: the cropped image and the matching ground-truth crop
    """
    data_length = 5000
    win_length = 768
    boundary = data_length - win_length
    crop_win_height = np.random.randint(0, boundary)
    crop_win_width = np.random.randint(0, boundary)
    random_region = (crop_win_width, crop_win_height, crop_win_width + win_length, crop_win_height + win_length)
    return image.crop(random_region), gt.crop(random_region)
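# Quick illustration (assumption: a 5000x5000 source image matching data_length):
# img = Image.new('RGB', (5000, 5000))
# gt = Image.new('L', (5000, 5000))
# crop_img, crop_gt = randomCrop(img, gt)  # both crops are 768x768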
def arguments(argv):
parser= argparse.ArgumentParser()
parser.add_argument('input_dir',type=str,help='Directory with raw data')
parser.add_argument('output_dir', type=str, help='Directory with cropped data')
# parser.add_argument('file_extension', type=str, help='Directory with cropped data',default='.tif')
return parser.parse_args(argv)
# C:\Users\14542\Desktop\t
if __name__ == '__main__':
main(arguments(sys.argv[1:]))
|
###
# Copyright (c) SpiderDave
# Copyright (c) 2020, oddluck <oddluck@riseup.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import re
import random
import supybot.conf as conf
import supybot.utils as utils
from supybot.commands import *
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
import time
import os, errno
import pickle
# This will be used to change the name of the class to the folder name
PluginName=os.path.dirname( __file__ ).split(os.sep)[-1]
class _Plugin(callbacks.Plugin):
"""
Implementation of games (Undercut, Flaunt, SuperFlaunt) described
in Metamagical Themas by Douglas Hofstadter.
"""
threaded = True
game=[{},{},{},{},{}]
channeloptions = {}
channeloptions['allow_game']=False
channeloptions['debug']=False
channeloptions['use_queue']=True
channeloptions['undercut_goal']=40
channeloptions['flaunt1_goal']=40
channeloptions['flaunt2_goal']=200
channeloptions['flaunt3_goal']=40
lastgame=time.time()
def make_sure_path_exists(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
make_sure_path_exists(r'%s%sundercut' % (conf.supybot.directories.data(),os.sep))
dataPath=r'%s%sundercut%s' % (conf.supybot.directories.data(),os.sep,os.sep)
prefixChar = conf.supybot.reply.whenAddressedBy.chars()[0]
def ucstart(self, irc, msg, args, text):
"""[<gametype>]
Start a new game of Undercut/Flaunt. For the rules of the game, use the ucrules command.
Valid game types are undercut, flaunt1, flaunt2, and flaunt3.
"""
try:
self._read_options(irc)
except:
pass
if self.channeloptions['allow_game']==False:
irc.reply('Error: allow_game=False')
return
if text:
gametype=text.lower().strip()
if gametype.replace(' ', '')=='globalthermonuclearwar':
irc.reply('Curious game. The only winning move is not to play.')
return
if gametype not in ['undercut', 'flaunt1', 'flaunt2', 'flaunt3']:
irc.reply('Error: Invalid game type %s.' % gametype)
return
else:
gametype='undercut'
nick=msg.nick
table=self._gettablefromnick(nick)
if table != None:
gametype=self.game[table].get('type').capitalize()
irc.reply('Error: You are already in a game of %s.' % gametype)
return
table=self._getopentable()
if table==None:
irc.reply('Sorry, all the game tables are in use at the moment.')
return
self._cleanup(table)
self.game[table]['channel']=msg.args[0]
self.game[table]['type']=gametype
goal=self.channeloptions[gametype+'_goal']
self.game[table]['goal']=goal
self.game[table]['players'][nick]={'score':0}
self.game[table]['players'][nick]['numbers']=[0]
irc.reply('%s has started a new game of %s at table %s. For the rules of the game, type ".ucrules". To accept this challenge, join with .ucjoin.' % (nick, gametype.capitalize(), table+1), prefixNick=False)
self.game[table]['phase']='join'
ucstart = wrap(ucstart, ['public', optional('something')])
def ucrules(self, irc, msg, args):
"""takes no arguments
Display rules for Undercut/Flaunt.
"""
irc.reply('Rules for Undercut/Flaunt: http://pastebin.com/raw.php?i=9cZ6ykWX Start a game with .ucstart <gametype>. Valid gametypes are undercut, flaunt1, flaunt2, and flaunt3.')
ucrules=wrap(ucrules)
def ucjoin(self, irc, msg, args, table, fakenick):
"""[<table>]
Join a game of Undercut/Flaunt previously started with the ucstart command.
Specify <table> if there is more than one game to join in that channel.
"""
try:
self._read_options(irc)
except:
pass
if self.channeloptions['allow_game']==False:
irc.reply('Error: allow_game=False')
return
nick=msg.nick
if table !=None: table-=1 # make tables zero based
tables=self._getcurrenttables()
if not tables:
# no games running
irc.reply('Error: There are no games to join.')
return
if table !=None and table not in tables:
# given table doesn't have a game going
if table not in list(range(len(self.game))):
irc.reply("Error: That table doesn't exist")
return
irc.reply("Error: There is no game at that table")
return
tables=[t for t in tables if self.game[t]['channel']==msg.args[0]]
if table !=None:
if table not in tables:
irc.reply('Error: That table is in another channel.')
return
tables=[table] # success!
if len(tables)==0:
irc.reply('Error: There are no games to join in this channel.')
return
elif len(tables)==1:
table=tables[0]
else:
messagetxt="Please specify which table you'd like to play at (ucjoin <table>). Current tables are: "
for t in tables:
messagetxt+='Table %s (%s), ' % (t+1, ' '.join(list(self.game[t]['players'].keys())))
messagetxt=messagetxt.rsplit(', ',1)[0]+'.'
irc.reply(messagetxt)
return
isfake=False
iscpu=False
if ((self.channeloptions['debug']) and fakenick) or (fakenick and fakenick.lower()=='cpu'):
nick=fakenick
isfake=True
if fakenick.lower()=='cpu': iscpu=True
if self.game[table]['phase']=='join':
if nick in list(self.game[table]['players'].keys()):
irc.reply('Error: you have already joined.')
return
self.game[table]['players'][nick]={'score':0}
self.game[table]['players'][nick]['numbers']=[0]
irc.reply('Game started! Use .ucplay (privately) to play a number from 1 to 5.', prefixNick=False, to=self.game[table]['channel'])
self.game[table]['phase']='running'
else:
if self.game[table]['phase']=='running':
irc.reply('Error: Game already running.')
return
elif self.game[table]['phase']=='':
irc.reply('Error: You need to create a game with .ucstart first.')
return
else:
# don't know when this would happen, but whatever
irc.reply('Error: not join phase.')
return
ucjoin = wrap(ucjoin, ['public', optional('int'), optional('something')])
def ucleave(self, irc, msg, args, fakenick):
"""takes no arguments
Leave a game of Undercut/Flaunt.
"""
try:
self._read_options(irc)
except:
pass
if self.channeloptions['allow_game']==False:
irc.reply('Error: allow_game=False')
return
nick=msg.nick
if self.channeloptions['debug'] and fakenick:
nick=fakenick
table=self._gettablefromnick(nick)
if table==None:
irc.reply('Error: You are not playing a game at any of the tables.')
return
irc.reply('%s has left the game.' % nick, prefixNick=False, to=self.game[table]['channel'])
del self.game[table]['players'][nick]
winner=[p for p in self.game[table]['players']]
if len(winner)>0:
winner=winner[0]
irc.reply('%s wins!' % winner, prefixNick=False, to=self.game[table]['channel'])
else:
irc.reply('The game has been cancelled.', prefixNick=False, to=self.game[table]['channel'])
self.game[table]['phase']='gameover'
self._cleanup(table)
ucleave = wrap(ucleave, ['public', optional('something')])
def _leavegame(self, irc, msg, nick):
"""takes no arguments
Leave a game of Undercut/Flaunt.
"""
try:
self._read_options(irc)
except:
pass
if self.channeloptions['allow_game']==False:
irc.reply('Error: allow_game=False')
return
table=self._gettablefromnick(nick)
if table==None:
#irc.reply('Error: You are not playing a game at any of the tables.')
return
#irc.reply('%s has left the game.' % nick, prefixNick=False, to=self.game[table]['channel'])
# ---- replace with cpu ----
# ** old uno specific stuff before I split it off;
# may want to adapt it for this **
# oldnick=nick
# nick=self._uno_make_cpu(table)
# del self.game[table]['players'][nick] # remove new cpu player (we just want the nick)
# self.game[table]['players'][nick]=self.game[table]['players'][oldnick]
# del self.game[table]['players'][oldnick]
# self.game[table]['players'][nick]['fake']=True
# self.game[table]['players'][nick]['cpu']=True
# irc.reply('%s has been replaced by %s.' % (oldnick, nick), prefixNick=False, to=self.game[table]['channel'])
# return
def ucplay(self, irc, msg, args, number, fakenick):
"""<number>
Play a <number> for the Undercut/Flaunt games. This command should
be used in a private message."""
nick=msg.nick
if self.channeloptions['debug'] and fakenick:
nick=fakenick
table=self._gettablefromnick(nick)
if table==None:
irc.reply('Error: You are not playing a game at any of the tables.')
return
if self.game[table]['phase']=='running':
if nick not in self.game[table]['players']:
irc.reply("Error: You're not playing this game.")
return
if number not in list(range(1,5+1)):
irc.reply('Error: You must play a number between 1 and 5.')
return
opponent=[p for p in self.game[table]['players'] if p !=nick][0]
if len(self.game[table]['players'][opponent]['numbers'])==len(self.game[table]['players'][nick]['numbers']):
self.game[table]['players'][nick]['numbers'].append(number)
irc.reply('%s made his move.' % nick, to=self.game[table]['channel'])
elif len(self.game[table]['players'][opponent]['numbers'])<len(self.game[table]['players'][nick]['numbers']):
irc.reply('Error: You must wait for your opponent.')
elif len(self.game[table]['players'][opponent]['numbers'])>len(self.game[table]['players'][nick]['numbers']):
self.game[table]['players'][nick]['numbers'].append(number)
irc.reply('%s made his move.' % (nick), to=self.game[table]['channel'])
players=[p for p in self.game[table]['players']]
numbers=[self.game[table]['players'][p]['numbers'][-1] for p in players]
point=[0,0]
flaunttxt=['','']
gametype=self.game[table]['type']
if gametype=='undercut':
if numbers[0]==numbers[1]-1:
undercut=0
point[0]=sum(numbers)
elif numbers[1]==numbers[0]-1:
undercut=1
point[1]=sum(numbers)
else:
undercut=None
point[0]=numbers[0]
point[1]=numbers[1]
elif gametype=='flaunt1':
flaunt=[0,0]
for p in range(len(players)):
for i in range(len(self.game[table]['players'][players[p]]['numbers'])-1):
if self.game[table]['players'][players[p]]['numbers'][-i-2] != self.game[table]['players'][players[p]]['numbers'][-i-1]:
flaunt[p]=i+1
break
if numbers[0]==numbers[1]-1:
undercut=0
point[0]=sum(numbers)
point[0]=numbers[0]*flaunt[0]+numbers[1]*flaunt[1]
elif numbers[1]==numbers[0]-1:
undercut=1
point[1]=sum(numbers)
point[1]=numbers[1]*flaunt[1]+numbers[0]*flaunt[0]
else:
undercut=None
point[0]=numbers[0]*flaunt[0]
point[1]=numbers[1]*flaunt[1]
flaunttxt[0]='x%s' % flaunt[0]
flaunttxt[1]='x%s' % flaunt[1]
flaunttxt[0]=flaunttxt[0].replace('x1','')
flaunttxt[1]=flaunttxt[1].replace('x1','')
elif gametype=='flaunt2':
flaunt=[0,0]
for p in range(len(players)):
for i in range(len(self.game[table]['players'][players[p]]['numbers'])-1):
if self.game[table]['players'][players[p]]['numbers'][-i-2] != self.game[table]['players'][players[p]]['numbers'][-i-1]:
flaunt[p]=i+1
break
if numbers[0]==numbers[1]-1:
undercut=0
point[0]=sum(numbers)
point[0]=numbers[0]**flaunt[0]+numbers[1]**flaunt[1]
elif numbers[1]==numbers[0]-1:
undercut=1
point[1]=sum(numbers)
point[1]=numbers[1]**flaunt[1]+numbers[0]**flaunt[0]
else:
undercut=None
point[0]=numbers[0]**flaunt[0]
point[1]=numbers[1]**flaunt[1]
flaunttxt[0]='^%s' % flaunt[0]
flaunttxt[1]='^%s' % flaunt[1]
flaunttxt[0]=flaunttxt[0].replace('^1','')
flaunttxt[1]=flaunttxt[1].replace('^1','')
elif gametype=='flaunt3':
flaunt=[0,0]
for p in range(len(players)):
for i in range(len(self.game[table]['players'][players[p]]['numbers'])-1):
if self.game[table]['players'][players[p]]['numbers'][-i-2] != self.game[table]['players'][players[p]]['numbers'][-i-1]:
flaunt[p]=i
break
if numbers[0]==numbers[1]-1:
undercut=0
point[0]=sum(numbers)
point[0]=numbers[0]+flaunt[0]+numbers[1]+flaunt[1]
elif numbers[1]==numbers[0]-1:
undercut=1
point[1]=sum(numbers)
point[1]=numbers[1]+flaunt[1]+numbers[0]+flaunt[0]
else:
undercut=None
point[0]=numbers[0]+flaunt[0]
point[1]=numbers[1]+flaunt[1]
flaunttxt[0]='+%s' % flaunt[0]
flaunttxt[1]='+%s' % flaunt[1]
flaunttxt[0]=flaunttxt[0].replace('+0','')
flaunttxt[1]=flaunttxt[1].replace('+0','')
boldplayer=None
if point[0]>point[1]: boldplayer=0
if point[1]>point[0]: boldplayer=1
txt=''
for p in range(len(players)):
if p==boldplayer:
txt+='%s: ' % players[p]
if p==undercut:
txt+=ircutils.bold('%s%s (undercut!)' % (numbers[p], flaunttxt[p]))
else:
txt+=ircutils.bold('%s%s' % (numbers[p], flaunttxt[p]))
else:
txt+='%s: %s%s' % (players[p], numbers[p], flaunttxt[p])
if p < len(players)-1:
txt+=', '
messagetxt=txt
messagetxt+=' // %s points for %s, %s points for %s.' % (point[0],players[0],point[1],players[1])
for i in range(len(players)):
self.game[table]['players'][players[i]]['score']+=point[i]
scores=[self.game[table]['players'][p]['score'] for p in players]
messagetxt+=' Total: %s(%s), %s(%s).' % (players[0], scores[0], players[1], scores[1])
irc.reply(messagetxt, to=self.game[table]['channel'])
if scores[0]>=self.game[table]['goal'] and scores[0]==scores[1]:
# both >= 40, equal scores
irc.reply("The game is over. It's a tie!", to=self.game[table]['channel'])
self.game[table]['phase']='gameover'
elif scores[0]>=self.game[table]['goal'] and scores[1]>self.game[table]['goal']:
# both >= 40, but different scores
if scores[0]>scores[1]:
winner=players[0]
else:
winner=players[1]
irc.reply('The game is over. %s wins!' % winner, to=self.game[table]['channel'])
self.game[table]['phase']='gameover'
elif scores[0]>=self.game[table]['goal']:
# first player wins
irc.reply('The game is over. %s wins!' % players[0], to=self.game[table]['channel'])
self.game[table]['phase']='gameover'
elif scores[1]>=self.game[table]['goal']:
# second player wins
irc.reply('The game is over. %s wins!' % players[1], to=self.game[table]['channel'])
self.game[table]['phase']='gameover'
#irc.reply('%s' % self.game)
if self.game[table]['phase']=='gameover':
self._cleanup(table)
else:
irc.reply('Error: game not running')
ucplay = wrap(ucplay, ['private', 'int', optional('something')])
def ucsetoption(self, irc, msg, args, channel, text, value):
"""<option> <value>
Changes an option for the Undercut/Flaunt games. You can view the
options for the current channel with the ucshowoptions command."""
try:
self._read_options(irc)
except:
pass
if value.lower()=='true':
value=True
elif value.lower()=='false':
value=False
elif value.lower()=='unset':
if text in self.channeloptions:
irc.reply('Set %s %s-->(unset)' % (text, self.channeloptions[text]))
del self.channeloptions[text]
try:
self._write_options(irc)
except:
irc.reply('Failed to write options to file. :(')
else:
irc.reply('%s was already unset.' % text)
return
if text in self.channeloptions:
irc.reply('Set %s %s-->%s' % (text, self.channeloptions[text], value))
self.channeloptions[text]=value
else:
irc.reply('Set %s (unset)-->%s' % (text, value))
self.channeloptions[text]=value
try:
self._write_options(irc)
except:
irc.reply('Failed to write options to file. :(')
ucsetoption = wrap(ucsetoption, [('checkChannelCapability', 'op'), 'something', 'something'])
def ucshowoptions(self, irc, msg, args):
"""(takes no arguments)
Shows options for the Undercut/Flaunt games for the current channel."""
try:
self._read_options(irc)
except:
pass
txt=', '.join(['='.join([str(i) for i in item]) for item in list(self.channeloptions.items())])
irc.reply(txt)
ucshowoptions = wrap(ucshowoptions)
def _cleanup(self, table):
self.game[table]={}
self.game[table]['players']={}
self.game[table]['phase']=''
def _getopentable(self):
openslot=[i for i in range(len(self.game)) if not self.game[i].get('phase')]
if len(openslot)==0:
return None
else:
return openslot[0]
def _getcurrenttables(self):
slot=[i for i in range(len(self.game)) if self.game[i].get('phase')]
return slot
def _gettablefromnick(self, n):
tables=self._getcurrenttables()
if not tables: return None
for table in tables:
#if n.lower() in map(lambda x:x.lower(), self.game[table]['players'].keys()):
if n.lower() in [x.lower() for x in list(self.game[table]['players'].keys())]:
return table
return None
def _read_options(self, irc):
network=irc.network.replace(' ','_')
channel=irc.msg.args[0]
#irc.reply('test: %s.%s.options' % (irc.network, irc.msg.args[0] ))
f="%s%s.%s.options" % (self.dataPath, network, channel)
if os.path.isfile(f):
inputfile = open(f, "rb")
self.channeloptions = pickle.load(inputfile)
inputfile.close()
else:
    # Use defaults; assign to the instance (a local dict here would just be discarded)
    self.channeloptions = {}
    self.channeloptions['allow_game']=False
    self.channeloptions['debug']=False
    self.channeloptions['use_queue']=True
    self.channeloptions['undercut_goal']=40
    self.channeloptions['flaunt1_goal']=40
    self.channeloptions['flaunt2_goal']=200
    self.channeloptions['flaunt3_goal']=40
    return
def _write_options(self, irc):
network=irc.network.replace(' ','_')
channel=irc.msg.args[0]
outputfile = open("%s%s.%s.options" % (self.dataPath, network, channel), "wb")
pickle.dump(self.channeloptions, outputfile)
outputfile.close()
# def _get_default_options(self):
# self.channeloptions = {}
# self.channeloptions['allow_game']=False
# self.channeloptions['debug']=False
def doNick(self, irc, msg):
oldNick = msg.nick
newNick = msg.args[0]
table=self._gettablefromnick(oldNick)
if table == None:
return
self.game[table]['players'][newNick]=self.game[table]['players'][oldNick]
del self.game[table]['players'][oldNick]
def doQuit(self, irc, msg):
nick=msg.nick
table=self._gettablefromnick(nick)
if table == None:
return
self._leavegame(irc, msg, nick)
def doPart(self, irc, msg):
#self.log.info('doPart debug: msg.args[0]=%s, msg.args[1]=%s, msg.command=%s, msg.nick=%s' % (msg.args[0], msg.args[1], msg.command, msg.nick))
nick=msg.nick
table=self._gettablefromnick(nick)
if table == None:
return
if msg.args[0] == self.game[table]['channel']:
self._leavegame(irc, msg, nick)
def doKick(self, irc, msg):
(channel, nicks) = msg.args[:2]
nicks=nicks.split(',')
for nick in nicks:
table=self._gettablefromnick(nick)
if table!=None:
self._leavegame(irc, msg, nick)
def _sendMsg(self, irc, msg):
if self.channeloptions['use_queue']:
irc.queueMsg(msg)
else:
irc.sendMsg(msg)
irc.noReply()
def reply(self, irc, text, action=False, private=False, prefixNick=False, to='', fast=False):
table=self._gettablefromnick(to)
if table == None:
# hopefully it's a valid channel
pass
else:
if self.game[table]['players'][to].get('fake'):
if self.channeloptions['debug']:
text='(to %s): %s' % (to, text)
text=ircutils.mircColor(text, fg=14)
to=self.game[table]['channel']
else:
# No need to show cpu actions anywhere if debug is false.
return
if action==True or fast==False:
irc.reply(text, action=action, private=private, prefixNick=prefixNick, to=to)
else:
if (prefixNick) and ('#' not in to):
text='%s: %s' % (to, text)
m=ircmsgs.privmsg(to, text)
self._sendMsg(irc, m)
_Plugin.__name__=PluginName
Class = _Plugin
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|
import os, sys, time
path = os.path.join(os.path.dirname(__file__), '../lib/')
sys.path.insert(0, path)
from thrift.transport import THttpClient
from thrift.protocol import TCompactProtocol
from curve import LineService
from curve.ttypes import *
class Poll:
client = None
auth_query_path = "/api/v4/TalkService.do";
http_query_path = "/S4";
polling_path = "/P4";
host = "gd2.line.naver.jp";
port = 443;
UA = "Line/7.18.4 iPad4,1 9.0.2"
LA = "CHROMEOS\t8.1.1\tiyo-PC\t8.1.1"
rev = 0
def __init__(self, authToken):
self.transport = THttpClient.THttpClient('https://gd2.line.naver.jp:443'+ self.http_query_path)
self.transport.setCustomHeaders({
"User-Agent" : self.UA,
"X-Line-Application" : self.LA,
"X-Line-Access": authToken
});
self.protocol = TCompactProtocol.TCompactProtocol(self.transport);
self.client = LineService.Client(self.protocol)
self.rev = self.client.getLastOpRevision()
self.transport.path = self.polling_path
self.transport.open()
def stream(self, sleep=50000):
#usleep = lambda x: time.sleep(x/1000000.0)
while True:
try:
Ops = self.client.fetchOps(self.rev, 5)
except EOFError:
raise Exception("It might be wrong revision\n" + str(self.rev))
for Op in Ops:
# print Op.type
if (Op.type != OpType.END_OF_OPERATION):
self.rev = max(self.rev, Op.revision)
return Op
#usleep(sleep)
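# Hedged usage sketch (requires a valid LINE auth token; illustrative only):
# poller = Poll(authToken)
# op = poller.stream()  # blocks until a non-END_OF_OPERATION op arrives, then returns it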
|
from setuptools import setup
setup(
name = "Catnap",
version = "0.4.5",
description = "A script for running integration tests against RESTful/HTTP-based interfaces",
author = "Yusuf Simonson",
url = "http://github.com/dailymuse/catnap",
packages = [
"catnap",
],
scripts = ["scripts/catnap"],
install_requires = [
"PyYAML==3.10",
"requests==1.2.3",
"optfn==0.4.0"
]
)
|
from .models import MemberData
from ..enums import Permission
class IMemberDataProvider:
def get_basic_data_by_id(self, server_id: str, user_id: str) -> MemberData:
raise NotImplementedError
def get_basic_data_by_name(self, server_id: str, name: str) -> MemberData:
raise NotImplementedError
def is_member(self, server_id: str, user_id: str) -> bool:
raise NotImplementedError
def get_member_permissions(self, server_id: str, channel_id: str, user_id: str) -> Permission:
raise NotImplementedError
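# A minimal in-memory implementation sketch (hypothetical, for illustration):
# class DictMemberDataProvider(IMemberDataProvider):
#     def __init__(self, members: dict):
#         self._members = members  # maps (server_id, user_id) -> MemberData
#     def is_member(self, server_id: str, user_id: str) -> bool:
#         return (server_id, user_id) in self._members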
|
# terrascript/data/kvrhdn/honeycombio.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:18:50 UTC)
import terrascript
class honeycombio_datasets(terrascript.Data):
pass
class honeycombio_query(terrascript.Data):
pass
class honeycombio_trigger_recipient(terrascript.Data):
pass
__all__ = [
"honeycombio_datasets",
"honeycombio_query",
"honeycombio_trigger_recipient",
]
|
# Signals here
|
from Node import Node
from collections import deque
def search(state, goal_state, yield_after):
cur_node = Node(state)
explored = set()
queue = deque([cur_node])
counter = 0
while len(queue) != 0:
cur_node = queue.popleft()
explored.add(cur_node.map)
if cur_node.is_goal(goal_state):
break
cur_node.expand()
for child in cur_node.children:
if child.map not in explored:
queue.append(child)
explored.add(child.map)
counter += 1
if counter%yield_after == 0:
yield [0, cur_node.state]
expanded_states = [cur_node.state]
for parent in cur_node.ancestors():
expanded_states.append(parent.state)
expanded_states.reverse()
yield [1, cur_node.state, expanded_states, counter, cur_node.depth+1]
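# Illustrative driving loop (Node is assumed to expose .map, .state,
# .children, .depth and .ancestors() as used above); the final yield carries
# the solution path at index 2:
# for update in search(start_state, goal_state, yield_after=1000):
#     if update[0] == 1:
#         path_states = update[2]
#         break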
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'NetworkSegment',
'PortAllowedAddressPair',
'PortBinding',
'PortExtraDhcpOption',
'PortFixedIp',
'RouterExternalFixedIp',
'RouterVendorOptions',
'SubnetAllocationPool',
'SubnetAllocationPoolsCollection',
'SubnetHostRoute',
'TrunkSubPort',
'GetPortAllowedAddressPairResult',
'GetPortBindingResult',
'GetPortExtraDhcpOptionResult',
'GetRouterExternalFixedIpResult',
'GetSubnetAllocationPoolResult',
'GetSubnetHostRouteResult',
'GetTrunkSubPortResult',
]
@pulumi.output_type
class NetworkSegment(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "networkType":
suggest = "network_type"
elif key == "physicalNetwork":
suggest = "physical_network"
elif key == "segmentationId":
suggest = "segmentation_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NetworkSegment. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NetworkSegment.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NetworkSegment.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
network_type: Optional[str] = None,
physical_network: Optional[str] = None,
segmentation_id: Optional[int] = None):
"""
:param str network_type: The type of physical network.
:param str physical_network: The physical network where this network is implemented.
:param int segmentation_id: An isolated segment on the physical network.
"""
if network_type is not None:
pulumi.set(__self__, "network_type", network_type)
if physical_network is not None:
pulumi.set(__self__, "physical_network", physical_network)
if segmentation_id is not None:
pulumi.set(__self__, "segmentation_id", segmentation_id)
@property
@pulumi.getter(name="networkType")
def network_type(self) -> Optional[str]:
"""
The type of physical network.
"""
return pulumi.get(self, "network_type")
@property
@pulumi.getter(name="physicalNetwork")
def physical_network(self) -> Optional[str]:
"""
The physical network where this network is implemented.
"""
return pulumi.get(self, "physical_network")
@property
@pulumi.getter(name="segmentationId")
def segmentation_id(self) -> Optional[int]:
"""
An isolated segment on the physical network.
"""
return pulumi.get(self, "segmentation_id")
@pulumi.output_type
class PortAllowedAddressPair(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ipAddress":
suggest = "ip_address"
elif key == "macAddress":
suggest = "mac_address"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PortAllowedAddressPair. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PortAllowedAddressPair.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PortAllowedAddressPair.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ip_address: str,
mac_address: Optional[str] = None):
"""
:param str ip_address: The additional IP address.
:param str mac_address: The additional MAC address.
"""
pulumi.set(__self__, "ip_address", ip_address)
if mac_address is not None:
pulumi.set(__self__, "mac_address", mac_address)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> str:
"""
The additional IP address.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="macAddress")
def mac_address(self) -> Optional[str]:
"""
The additional MAC address.
"""
return pulumi.get(self, "mac_address")
@pulumi.output_type
class PortBinding(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "hostId":
suggest = "host_id"
elif key == "vifDetails":
suggest = "vif_details"
elif key == "vifType":
suggest = "vif_type"
elif key == "vnicType":
suggest = "vnic_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PortBinding. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PortBinding.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PortBinding.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
host_id: Optional[str] = None,
profile: Optional[str] = None,
vif_details: Optional[Mapping[str, Any]] = None,
vif_type: Optional[str] = None,
vnic_type: Optional[str] = None):
"""
:param str host_id: The ID of the host to allocate port on.
:param str profile: Custom data to be passed as `binding:profile`. Data
must be passed as JSON.
:param Mapping[str, Any] vif_details: A map of JSON strings containing additional
details for this specific binding.
:param str vif_type: The VNIC type of the port binding.
:param str vnic_type: VNIC type for the port. Can either be `direct`,
`direct-physical`, `macvtap`, `normal`, `baremetal` or `virtio-forwarder`.
Default value is `normal`.
"""
if host_id is not None:
pulumi.set(__self__, "host_id", host_id)
if profile is not None:
pulumi.set(__self__, "profile", profile)
if vif_details is not None:
pulumi.set(__self__, "vif_details", vif_details)
if vif_type is not None:
pulumi.set(__self__, "vif_type", vif_type)
if vnic_type is not None:
pulumi.set(__self__, "vnic_type", vnic_type)
@property
@pulumi.getter(name="hostId")
def host_id(self) -> Optional[str]:
"""
The ID of the host to allocate port on.
"""
return pulumi.get(self, "host_id")
@property
@pulumi.getter
def profile(self) -> Optional[str]:
"""
Custom data to be passed as `binding:profile`. Data
must be passed as JSON.
"""
return pulumi.get(self, "profile")
@property
@pulumi.getter(name="vifDetails")
def vif_details(self) -> Optional[Mapping[str, Any]]:
"""
A map of JSON strings containing additional
details for this specific binding.
"""
return pulumi.get(self, "vif_details")
@property
@pulumi.getter(name="vifType")
def vif_type(self) -> Optional[str]:
"""
The VNIC type of the port binding.
"""
return pulumi.get(self, "vif_type")
@property
@pulumi.getter(name="vnicType")
def vnic_type(self) -> Optional[str]:
"""
VNIC type for the port. Can either be `direct`,
`direct-physical`, `macvtap`, `normal`, `baremetal` or `virtio-forwarder`.
Default value is `normal`.
"""
return pulumi.get(self, "vnic_type")
@pulumi.output_type
class PortExtraDhcpOption(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ipVersion":
suggest = "ip_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PortExtraDhcpOption. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PortExtraDhcpOption.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PortExtraDhcpOption.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
name: str,
value: str,
ip_version: Optional[int] = None):
"""
:param str name: Name of the DHCP option.
:param str value: Value of the DHCP option.
:param int ip_version: IP protocol version. Defaults to 4.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
if ip_version is not None:
pulumi.set(__self__, "ip_version", ip_version)
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the DHCP option.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> str:
"""
Value of the DHCP option.
"""
return pulumi.get(self, "value")
@property
@pulumi.getter(name="ipVersion")
def ip_version(self) -> Optional[int]:
"""
IP protocol version. Defaults to 4.
"""
return pulumi.get(self, "ip_version")
@pulumi.output_type
class PortFixedIp(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "subnetId":
suggest = "subnet_id"
elif key == "ipAddress":
suggest = "ip_address"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PortFixedIp. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PortFixedIp.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PortFixedIp.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
subnet_id: str,
ip_address: Optional[str] = None):
"""
:param str subnet_id: Subnet in which to allocate IP address for
this port.
:param str ip_address: The additional IP address.
"""
pulumi.set(__self__, "subnet_id", subnet_id)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> str:
"""
Subnet in which to allocate IP address for
this port.
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[str]:
"""
The additional IP address.
"""
return pulumi.get(self, "ip_address")
@pulumi.output_type
class RouterExternalFixedIp(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ipAddress":
suggest = "ip_address"
elif key == "subnetId":
suggest = "subnet_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in RouterExternalFixedIp. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
RouterExternalFixedIp.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
RouterExternalFixedIp.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ip_address: Optional[str] = None,
subnet_id: Optional[str] = None):
"""
:param str ip_address: The IP address to set on the router.
        :param str subnet_id: Subnet to which the fixed IP belongs.
"""
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[str]:
"""
The IP address to set on the router.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[str]:
"""
        Subnet to which the fixed IP belongs.
"""
return pulumi.get(self, "subnet_id")
@pulumi.output_type
class RouterVendorOptions(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "setRouterGatewayAfterCreate":
suggest = "set_router_gateway_after_create"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in RouterVendorOptions. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
RouterVendorOptions.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
RouterVendorOptions.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
set_router_gateway_after_create: Optional[bool] = None):
"""
:param bool set_router_gateway_after_create: Boolean to control whether
the Router gateway is assigned during creation or updated after creation.
"""
if set_router_gateway_after_create is not None:
pulumi.set(__self__, "set_router_gateway_after_create", set_router_gateway_after_create)
@property
@pulumi.getter(name="setRouterGatewayAfterCreate")
def set_router_gateway_after_create(self) -> Optional[bool]:
"""
Boolean to control whether
the Router gateway is assigned during creation or updated after creation.
"""
return pulumi.get(self, "set_router_gateway_after_create")
@pulumi.output_type
class SubnetAllocationPool(dict):
def __init__(__self__, *,
end: str,
start: str):
"""
:param str end: The ending address.
:param str start: The starting address.
"""
pulumi.set(__self__, "end", end)
pulumi.set(__self__, "start", start)
@property
@pulumi.getter
def end(self) -> str:
"""
The ending address.
"""
return pulumi.get(self, "end")
@property
@pulumi.getter
def start(self) -> str:
"""
The starting address.
"""
return pulumi.get(self, "start")
@pulumi.output_type
class SubnetAllocationPoolsCollection(dict):
def __init__(__self__, *,
end: str,
start: str):
"""
:param str end: The ending address.
:param str start: The starting address.
"""
pulumi.set(__self__, "end", end)
pulumi.set(__self__, "start", start)
@property
@pulumi.getter
def end(self) -> str:
"""
The ending address.
"""
return pulumi.get(self, "end")
@property
@pulumi.getter
def start(self) -> str:
"""
The starting address.
"""
return pulumi.get(self, "start")
@pulumi.output_type
class SubnetHostRoute(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "destinationCidr":
suggest = "destination_cidr"
elif key == "nextHop":
suggest = "next_hop"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SubnetHostRoute. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SubnetHostRoute.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SubnetHostRoute.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
destination_cidr: str,
next_hop: str):
"""
:param str destination_cidr: The destination CIDR.
:param str next_hop: The next hop in the route.
"""
pulumi.set(__self__, "destination_cidr", destination_cidr)
pulumi.set(__self__, "next_hop", next_hop)
@property
@pulumi.getter(name="destinationCidr")
def destination_cidr(self) -> str:
"""
The destination CIDR.
"""
return pulumi.get(self, "destination_cidr")
@property
@pulumi.getter(name="nextHop")
def next_hop(self) -> str:
"""
The next hop in the route.
"""
return pulumi.get(self, "next_hop")
@pulumi.output_type
class TrunkSubPort(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "portId":
suggest = "port_id"
elif key == "segmentationId":
suggest = "segmentation_id"
elif key == "segmentationType":
suggest = "segmentation_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in TrunkSubPort. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
TrunkSubPort.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
TrunkSubPort.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
port_id: str,
segmentation_id: int,
segmentation_type: str):
"""
:param str port_id: The ID of the port to be made a subport of the trunk.
:param int segmentation_id: The numeric id of the subport segment.
:param str segmentation_type: The segmentation technology to use, e.g., "vlan".
"""
pulumi.set(__self__, "port_id", port_id)
pulumi.set(__self__, "segmentation_id", segmentation_id)
pulumi.set(__self__, "segmentation_type", segmentation_type)
@property
@pulumi.getter(name="portId")
def port_id(self) -> str:
"""
The ID of the port to be made a subport of the trunk.
"""
return pulumi.get(self, "port_id")
@property
@pulumi.getter(name="segmentationId")
def segmentation_id(self) -> int:
"""
The numeric id of the subport segment.
"""
return pulumi.get(self, "segmentation_id")
@property
@pulumi.getter(name="segmentationType")
def segmentation_type(self) -> str:
"""
The segmentation technology to use, e.g., "vlan".
"""
return pulumi.get(self, "segmentation_type")
@pulumi.output_type
class GetPortAllowedAddressPairResult(dict):
def __init__(__self__, *,
ip_address: str,
mac_address: str):
"""
:param str ip_address: The additional IP address.
:param str mac_address: The MAC address of the port.
"""
pulumi.set(__self__, "ip_address", ip_address)
pulumi.set(__self__, "mac_address", mac_address)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> str:
"""
The additional IP address.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="macAddress")
def mac_address(self) -> str:
"""
The MAC address of the port.
"""
return pulumi.get(self, "mac_address")
@pulumi.output_type
class GetPortBindingResult(dict):
def __init__(__self__, *,
host_id: str,
profile: str,
vif_details: Mapping[str, Any],
vif_type: str,
vnic_type: str):
"""
        :param str host_id: The ID of the host which has the allocated port.
:param str profile: A JSON string containing the binding profile information.
:param Mapping[str, Any] vif_details: A map of JSON strings containing additional details for this
specific binding.
:param str vif_type: The VNIC type of the port binding.
:param str vnic_type: VNIC type for the port.
"""
pulumi.set(__self__, "host_id", host_id)
pulumi.set(__self__, "profile", profile)
pulumi.set(__self__, "vif_details", vif_details)
pulumi.set(__self__, "vif_type", vif_type)
pulumi.set(__self__, "vnic_type", vnic_type)
@property
@pulumi.getter(name="hostId")
def host_id(self) -> str:
"""
        The ID of the host which has the allocated port.
"""
return pulumi.get(self, "host_id")
@property
@pulumi.getter
def profile(self) -> str:
"""
A JSON string containing the binding profile information.
"""
return pulumi.get(self, "profile")
@property
@pulumi.getter(name="vifDetails")
def vif_details(self) -> Mapping[str, Any]:
"""
A map of JSON strings containing additional details for this
specific binding.
"""
return pulumi.get(self, "vif_details")
@property
@pulumi.getter(name="vifType")
def vif_type(self) -> str:
"""
The VNIC type of the port binding.
"""
return pulumi.get(self, "vif_type")
@property
@pulumi.getter(name="vnicType")
def vnic_type(self) -> str:
"""
VNIC type for the port.
"""
return pulumi.get(self, "vnic_type")
@pulumi.output_type
class GetPortExtraDhcpOptionResult(dict):
def __init__(__self__, *,
ip_version: int,
name: str,
value: str):
"""
:param int ip_version: IP protocol version
:param str name: The name of the port.
:param str value: Value of the DHCP option.
"""
pulumi.set(__self__, "ip_version", ip_version)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter(name="ipVersion")
def ip_version(self) -> int:
"""
IP protocol version
"""
return pulumi.get(self, "ip_version")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the port.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> str:
"""
Value of the DHCP option.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class GetRouterExternalFixedIpResult(dict):
def __init__(__self__, *,
ip_address: Optional[str] = None,
subnet_id: Optional[str] = None):
"""
:param str ip_address: The IP address to set on the router.
"""
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[str]:
"""
The IP address to set on the router.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[str]:
return pulumi.get(self, "subnet_id")
@pulumi.output_type
class GetSubnetAllocationPoolResult(dict):
def __init__(__self__, *,
end: str,
start: str):
pulumi.set(__self__, "end", end)
pulumi.set(__self__, "start", start)
@property
@pulumi.getter
def end(self) -> str:
return pulumi.get(self, "end")
@property
@pulumi.getter
def start(self) -> str:
return pulumi.get(self, "start")
@pulumi.output_type
class GetSubnetHostRouteResult(dict):
def __init__(__self__, *,
destination_cidr: str,
next_hop: str):
pulumi.set(__self__, "destination_cidr", destination_cidr)
pulumi.set(__self__, "next_hop", next_hop)
@property
@pulumi.getter(name="destinationCidr")
def destination_cidr(self) -> str:
return pulumi.get(self, "destination_cidr")
@property
@pulumi.getter(name="nextHop")
def next_hop(self) -> str:
return pulumi.get(self, "next_hop")
@pulumi.output_type
class GetTrunkSubPortResult(dict):
def __init__(__self__, *,
port_id: str,
segmentation_id: int,
segmentation_type: str):
"""
:param str port_id: The ID of the trunk parent port.
:param int segmentation_id: The numeric id of the subport segment.
        :param str segmentation_type: The segmentation technology used, e.g., "vlan".
"""
pulumi.set(__self__, "port_id", port_id)
pulumi.set(__self__, "segmentation_id", segmentation_id)
pulumi.set(__self__, "segmentation_type", segmentation_type)
@property
@pulumi.getter(name="portId")
def port_id(self) -> str:
"""
The ID of the trunk parent port.
"""
return pulumi.get(self, "port_id")
@property
@pulumi.getter(name="segmentationId")
def segmentation_id(self) -> int:
"""
The numeric id of the subport segment.
"""
return pulumi.get(self, "segmentation_id")
@property
@pulumi.getter(name="segmentationType")
def segmentation_type(self) -> str:
"""
        The segmentation technology used, e.g., "vlan".
"""
return pulumi.get(self, "segmentation_type")
|
### This file is a part of the Syncpy library.
### Copyright 2015, ISIR / Universite Pierre et Marie Curie (UPMC)
### Main contributor(s): Giovanna Varni, Marie Avril,
### syncpy@isir.upmc.fr
###
### This software is a computer program whose purpose is to investigate
### synchrony in a fast and exhaustive way.
###
### This software is governed by the CeCILL-B license under French law
### and abiding by the rules of distribution of free software. You
### can use, modify and/ or redistribute the software under the terms
### of the CeCILL-B license as circulated by CEA, CNRS and INRIA at the
### following URL "http://www.cecill.info".
### As a counterpart to the access to the source code and rights to
### copy, modify and redistribute granted by the license, users are
### provided only with a limited warranty and the software's author,
### the holder of the economic rights, and the successive licensors
### have only limited liability.
###
### In this respect, the user's attention is drawn to the risks
### associated with loading, using, modifying and/or developing or
### reproducing the software by the user in light of its specific
### status of free software, that may mean that it is complicated to
### manipulate, and that also therefore means that it is reserved for
### developers and experienced professionals having in-depth computer
### knowledge. Users are therefore encouraged to load and test the
### software's suitability as regards their requirements in conditions
### enabling the security of their systems and/or data to be ensured
### and, more generally, to use and operate it in the same conditions
### as regards security.
###
### The fact that you are presently reading this means that you have
### had knowledge of the CeCILL-B license and that you accept its terms.
"""
.. moduleauthor:: Giovanna Varni
"""
import sys
sys.path.insert(0, '../src/')
import numpy as np
import pandas as pd
import random
from scipy.spatial import cKDTree
from scipy.special import psi
from Method import Method, MethodArgList
class MutualInformation(Method):
"""
It computes Mutual Information (MI) estimators starting from entropy estimates from k-nearest-neighbours distances.
**Reference :**
* A.Kraskov, H.Stogbauer, and P.Grassberger. Estimating mutual information. Physical Review E, 69(6):066138, 2004
:param n_neighbours:
number of nearest neighbours
:type n_neighbours: int
:param my_type:
Type of the estimators will be used to compute MI. Two options (1 and 2) are available:
        1. the number of the points nx and ny is computed by taking into account only the points whose distance is strictly
        less than the distance of the k-nearest neighbours;
2. the number of the points nx and ny is computed by taking into account only the points whose distance is equal to
or less than the distance of the k-nearest neighbours;
Default: 1
:type my_type: int
:param var_resc:
Boolean value indicating if the input signals should be rescaled at unitary variance. Default: False
:type var_resc: bool
:param noise:
Boolean value indicating if a very low amplitude random noise should be added to the signals.
It is done to avoid that there are many signals points having identical coordinates. Default: True
:type noise: bool
"""
argsList = MethodArgList()
argsList.append('n_neighbours', 10, int, 'number of the nearest neighbours to be used')
argsList.append('my_type', 1, int, 'the type of estimators [1 or 2]')
argsList.append('var_resc', False, bool, 'signals should be rescaled at unitary variance')
argsList.append('noise', True, bool, 'adding random noise to the time series')
    ''' Constructor '''
    def __init__(self, n_neighbours, my_type=1, var_resc=False, noise=True, **kwargs):
super(MutualInformation, self).__init__(plot=False,**kwargs)
        # Raise error if parameters are not in the correct type
        if not isinstance(n_neighbours, int):
            raise TypeError("Requires n_neighbours to be an integer")
        if not isinstance(my_type, int):
            raise TypeError("Requires my_type to be an integer")
        if not isinstance(var_resc, bool):
            raise TypeError("Requires var_resc to be a boolean")
        if not isinstance(noise, bool):
            raise TypeError("Requires noise to be a boolean")
        # Raise error if parameters do not respect input rules
        if n_neighbours <= 0:
            raise ValueError("Requires n_neighbours to be a positive integer greater than 0")
        if my_type != 1 and my_type != 2:
            raise ValueError("Requires my_type to be 1 or 2")
self.n_neighbours=n_neighbours
self.type=my_type
self.var_resc=var_resc
self.noise=noise
def compute(self,signals):
"""
It computes Mutual Information.
        :param signals:
            list containing the two input signals [x, y]
        :type signals: list of pd.DataFrame
:returns: dict
-- Mutual Information
"""
x = signals[0]
y = signals[1]
        # Raise error if parameters are not in the correct type
        if not isinstance(x, pd.DataFrame):
            raise TypeError("Requires x to be a pd.DataFrame")
        if not isinstance(y, pd.DataFrame):
            raise TypeError("Requires y to be a pd.DataFrame")
        x = x.astype(np.float64)
        y = y.astype(np.float64)
#pd.set_option('display.precision', 13) #to print pandas dataframe with 13 digits (12 decimals)
#np.set_printoptions(precision=13) #to print np array with 13 digits
        # random noise generation: breaks ties between identical coordinates
        if self.noise:
            rnoise_x = pd.DataFrame(np.random.rand(x.shape[0], 1) / 1e10, x.index)
            rnoise_y = pd.DataFrame(np.random.rand(y.shape[0], 1) / 1e10, y.index)
            x.iloc[:, 0] += rnoise_x.iloc[:, 0]
            y.iloc[:, 0] += rnoise_y.iloc[:, 0]
#rescaling of the time series
        if self.var_resc:
xstd=((x-x.mean(axis=0))/x.std(axis=0))-x.min()
ystd=((y-y.mean(axis=0))/y.std(axis=0))-y.min()
x=xstd-xstd.min()
y=ystd-ystd.min()
#building z time series vector
z=pd.concat([x,y],axis=1)
        z = z.astype(np.float64)
        dx = np.array([], dtype=np.float64)
        dy = np.array([], dtype=np.float64)
        dz = np.array([], dtype=np.float64)
#looking for the k nearest neighbours
tree=cKDTree(z)
for i in range(0,z.shape[0]):
di,idxi=tree.query(z.iloc[i].values,k=self.n_neighbours+1,p=np.inf)
dx_abs=(np.abs(x.iloc[idxi].values-z.iloc[i,0]))
dx_abs=np.delete(dx_abs,np.where(dx_abs==0))
dx_i=np.max(dx_abs)
dx=np.append(dx,dx_i)
dy_abs=(np.abs(y.iloc[idxi].values-z.iloc[i,1]))
dy_abs=np.delete(dy_abs,np.where(dy_abs==0))
dy_i=np.max(dy_abs)
dy=np.append(dy,dy_i)
dz_i=max(dx_i,dy_i)
dz=np.append(dz,dz_i)
nx=np.array([])
ny=np.array([])
#Estimator 1
if self.type==1:
for i in range(0,x.shape[0]):
nx_i=np.count_nonzero((np.array((np.abs(x.subtract(x.iloc[i])))).astype(np.float64,order='C')) < dz[i] )
if nx_i > 0:
nx_i=nx_i-1
nx=np.append(nx,nx_i)
ny_i=np.count_nonzero((np.array((np.abs(y.subtract(y.iloc[i])))).astype(np.float64,order='C')) < dz[i])
if ny_i > 0:
ny_i=ny_i-1
ny=np.append(ny,ny_i)
psi_xy=(psi(nx+1)+psi(ny+1))
MI = psi(self.n_neighbours)+psi(z.shape[0])-np.mean(psi_xy)
# Estimator 2
elif self.type==2:
for i in range(0,x.shape[0]):
nx_i=np.count_nonzero((np.array((np.abs(x.subtract(x.iloc[i])))).astype(np.float64,order='C'))<= dx[i] )
if nx_i > 0:
nx_i=nx_i-1
nx=np.append(nx,nx_i)
ny_i=np.count_nonzero((np.array((np.abs(y.subtract(y.iloc[i])))).astype(np.float64,order='C'))<= dy[i])
if ny_i > 0:
ny_i=ny_i-1
ny=np.append(ny,ny_i)
psi_xy=(psi(nx)+psi(ny))
MI = psi(self.n_neighbours)+psi(z.shape[0])-(1.0/self.n_neighbours)-np.mean(psi_xy)
results = dict()
results['MI'] = MI
return results
@staticmethod
def getArguments():
return MutualInformation.argsList.getMethodArgs()
@staticmethod
def getArgumentsAsDictionary():
return MutualInformation.argsList.getArgumentsAsDictionary()
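# Minimal usage sketch (synthetic, independent signals, so the estimated MI
# should be close to 0; shapes follow the compute() contract above):
if __name__ == "__main__":
    x_sig = pd.DataFrame(np.random.rand(200, 1))
    y_sig = pd.DataFrame(np.random.rand(200, 1))
    mi = MutualInformation(n_neighbours=10, my_type=1)
    print(mi.compute([x_sig, y_sig])['MI'])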
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for qa_utils."""
import os
from tensorflow_datasets import testing
from tensorflow_datasets.question_answering import qa_utils
class QAUtilsTest(testing.TestCase):
def test_generate_squadlike_examples(self):
filepath = os.path.join(testing.test_utils.fake_examples_dir(), 'xquad',
'translate-test.json')
examples = qa_utils.generate_squadlike_examples(filepath)
self.assertEqual(
list(examples),
[('1', {
'id': '1',
'title': 'Zurich_Switzerland',
'context':
'Zurich is the largest city in Switzerland with over 400000 '
'inhabitants. In spite of this, it is not the capital of '
'Switzerland, which is located in Bern aka Bernie.',
'question': 'What is the capital of Switzerland?',
'answers': {
'answer_start': [1, 20, 29],
'text': ['Zurich', 'Bern', 'Bernie']
}
}),
('2', {
'id': '2',
'title': 'Zurich_Switzerland',
'context':
'Switzerland is the country in Euriope with 26 cantons. Zurich '
'canton has the largest population of 1.5 million.',
'question': 'How many cantons does Switzerland have?',
'answers': {
'answer_start': [8],
'text': ['26']
}
}),
('3', {
'id': '3',
'title': 'Paris_France',
'context':
'Paris is the largest city in France with over 2 million '
'inhabitants. It is the capital of France.',
'question': 'What is the capital of France?',
'answers': {
'answer_start': [1, 7],
'text': ['Paris', 'France']
}
})])
if __name__ == '__main__':
testing.test_main()
|
from regression_tests import *
class Test1(Test):
settings=TestSettings(
tool='fileinfo',
input='8b280f2b7788520de214fa8d6ea32a30ebb2a51038381448939530fd0f7dfc16',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "8FB47562286677514075BC38D1CFD2B73481D93CB3F9C23F9AC3E6414EF34A6F")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "582DC1D97A790EF04FE2567B1EC88C26B03BF6E99937CAE6A0B50397AD20BBF8")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test2(Test):
settings=TestSettings(
tool='fileinfo',
input='avgcfgex.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "3B0ABE047D7E84F3BBD12B5E399BED55E4D7E9FCC3F629B8953A8C060EF6D746")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "0CFC19DB681B014BFE3F23CB3A78B67208B4E3D8D7B6A7B1807F7CD6ECB2A54E")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test3(Test):
settings=TestSettings(
tool='fileinfo',
input='c339b87d932b3f86c298b1745db1a28b1214fb7635ba3805851ef8699290f9b8',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "FCB433D6D1AFBEC9E8F5447C2C0FA4AE7553986D5C2703BE82524BE608F35F61")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "53793CFC1B2B5096CC4EDBEC527ABC5CBC20470C788162D9E54C370D51625F4A")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "C766A9BEF2D4071C863A31AA4920E813B2D198608CB7B7CFE21143B836DF09EA")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test4(Test):
settings=TestSettings(
tool='fileinfo',
input='c58e6118bbe12d2c56b2db014c4eb0d3fd32cde7bca1f32a2da8169be1301e23',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "E2DBA399BE32992B74DF8A86CFD9886C2304CCC19DA8A9BE2B87809DA006379E")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "973A41276FFD01E027A2AAD49E34C37846D3E976FF6A620B6712E33832041AA6")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "3A2FBE92891E57FE05D57087F48E730F17E5A5F53EF403D618E5B74D7A7E6ECB")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test5(Test):
settings=TestSettings(
tool='fileinfo',
input='crashreporter.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "1A73BF16814D061CF5930634FBBD8A55E53DF2A556469C48FDF2623DFEEEE8A8")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "51044706BD237B91B89B781337E6D62656C69F0FCFFBE8E43741367948127862")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "3E9099B5015E8F486C00BCEA9D111EE721FABA355A89BCF1DF69561E3DC6325C")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test6(Test):
settings=TestSettings(
tool='fileinfo',
input='f77acb4e1523b882f5307864345e5f7d20a657a7f40863bd7ae41d2521703fec',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "FCB433D6D1AFBEC9E8F5447C2C0FA4AE7553986D5C2703BE82524BE608F35F61")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "53793CFC1B2B5096CC4EDBEC527ABC5CBC20470C788162D9E54C370D51625F4A")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "C766A9BEF2D4071C863A31AA4920E813B2D198608CB7B7CFE21143B836DF09EA")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test7(Test):
settings=TestSettings(
tool='fileinfo',
input='msenvmnu.dll',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "4")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "67C529AD57B2AEDD4D248993324270C7064D4F6BDAAF70044D772D05C56001A4")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "9CBF22FAE0DD53A7395556CE6154AA14A0D03360AA8C51CFEA05D1FD8819E043")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "6413CBCF5C6AB255868033D4E701B579B2509A47C3C18B3199C140D20209C19D")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "4F987BBE4E0D1DCF48FCEFC9239AC6E62EE9DF38CAC2D32993B8533CD95C2E49")
class Test8(Test):
settings=TestSettings(
tool='fileinfo',
input='PdfConv_32.dll',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "4")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "BB70F99803DB3F20919852D5AF93BCAD68F4F9109C8014676EE2CDD6FFDD1A8E")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "4F32D5DC00F715250ABCC486511E37F501A899DEB3BF7EA8ADBBD3AEF1C412DA")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "12F0A1DDF83D265B205B4F3BCA43B3FA89A748E9834EC24004774FD2FDE34073")
class Test9(Test):
settings=TestSettings(
tool='fileinfo',
input='thunderbird.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "5")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "1A73BF16814D061CF5930634FBBD8A55E53DF2A556469C48FDF2623DFEEEE8A8")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "51044706BD237B91B89B781337E6D62656C69F0FCFFBE8E43741367948127862")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "3E9099B5015E8F486C00BCEA9D111EE721FABA355A89BCF1DF69561E3DC6325C")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][4]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
class Test10(Test):
settings=TestSettings(
tool='fileinfo',
input='VSTST-FileConverter.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "4")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "67C529AD57B2AEDD4D248993324270C7064D4F6BDAAF70044D772D05C56001A4")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "9CBF22FAE0DD53A7395556CE6154AA14A0D03360AA8C51CFEA05D1FD8819E043")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "E43F82BC40029F17DBB516613D1E1A96EC2940CE76E0A9CD5F53BA50175A8766")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "4F987BBE4E0D1DCF48FCEFC9239AC6E62EE9DF38CAC2D32993B8533CD95C2E49")
class TestEscaping(Test):
settings=TestSettings(
tool='fileinfo',
input='3708882e564ba289416f65cb4cb2b4de',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "4")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "9D5DC543A16E3B97AA12ABB6A09C9393C1F6778E475D95C81607335D5D19AF8B")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "0D34394100E961CE4318DBA9B8DD38EBC25BB07AEF78FDA3FFF632685549BA0F")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][3]["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["attributes"]["subject"]["locality"], R"M\xfcnchen")
class Test11(Test):
settings=TestSettings(
tool='fileinfo',
args='--json --verbose',
input='x86-pe-ff6717faf307cdc5ba2d07e320cb8e33'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output["certificateTable"]["numberOfCertificates"], "3")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][0]["sha256"], "D271598ADB52545B0094E806AF9C4702D857B29D43D6896C523EEF7758519153")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][1]["sha256"], "09ED6E991FC3273D8FEA317D339C02041861973549CFA6E1558F411F11211AA3")
self.assertEqual(self.fileinfo.output["certificateTable"]["certificates"][2]["sha256"], "C3846BF24B9E93CA64274C0EC67C1ECC5E024FFCACD2D74019350E81FE546AE4")
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
p = pickle_SphereDataPhysicalDiff()
|
from server.server import Server
from common.command import Command
from sys import argv
from server.handlers import handler_map, check_state
from server.handlers import unknown_handler, invalid_handler
def handle_message(serv, usr, data):
try:
msg = Command(data)
(state, handler) = handler_map[msg.command]
if check_state(serv, usr, state):
handler(serv, usr, *msg.arguments)
except ValueError:
pass
except KeyError:
unknown_handler(serv, usr, msg.command)
except TypeError:
invalid_handler(serv, usr, msg.command)
argc = len(argv)
ip = '127.0.0.1' if (argc < 2) else argv[1]
port = 7000 if (argc < 3) else int(argv[2])
serv = Server()
serv.start(ip, port, lambda usr, data:
handle_message(serv, usr, data)
)
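# Usage sketch: python <this script> [ip] [port]; both arguments are optional
# and default to 127.0.0.1 and 7000.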
|
from flask_restful import Resource, reqparse
from flask_jwt_extended import get_jwt_identity, jwt_required
from constants import userConstants as UserConstants
import service.userCreateUpdateService as UserCreateUpdateService
class UserEmailUpdateResource(Resource):
parser = reqparse.RequestParser()
for field in UserConstants.PARSER_FIELDS_FOR_EMAIL_UPDATE:
parser.add_argument(field, required=True, help="Missing {} field.".format(field))
@jwt_required
def patch(self):
try:
user_request = UserEmailUpdateResource.parser.parse_args()
current_user_identity = get_jwt_identity()
response = UserCreateUpdateService.update_user_email(
current_user_identity,
user_request['oldEmail'],
user_request['newEmail'])
return {'response': response[0]}, int(response[1])
except Exception as e:
return {'error': 'Exception - {} - occurred.'.format(e.args)}, 400
@jwt_required
def get(self):
return {'error': 'method not supported.'}, 405
@jwt_required
def put(self):
return {'error': 'method not supported.'}, 405
@jwt_required
def post(self):
return {'error': 'method not supported.'}, 405
@jwt_required
def delete(self):
return {'error': 'method not supported.'}, 405
|
import plotoptix, os, zipfile
from plotoptix._load_lib import BIN_PATH, PLATFORM
from plotoptix.install import download_file_from_google_drive
def install_denoiser():
"""Install denoiser binaries.
"""
print("Downloading denoiser binaries...")
if PLATFORM == "Windows":
id = "1qLyR7c_upFJKxZDKQCLuDRC3pc-iwuh0"
file_name = "denoiser_libs_win.zip"
cudnn_lib = "cudnn64_7.dll"
denoiser_lib = "optix_denoiser.6.0.0.dll"
elif PLATFORM == "Linux":
id = "1LrtDm2TXx8Rs-gZVIhSkOdkCfNFz_Tsq"
file_name = "denoiser_libs_linux.zip"
cudnn_lib = "libcudnn.so.7.3.1"
denoiser_lib = "liboptix_denoiser.so.6.0.0"
else:
raise NotImplementedError
folder = os.path.join(os.path.dirname(plotoptix.__file__), BIN_PATH)
file_name = os.path.join(folder, file_name)
cudnn_lib = os.path.join(folder, cudnn_lib)
denoiser_lib = os.path.join(folder, denoiser_lib)
try:
download_file_from_google_drive(id, file_name)
    except Exception:
print("downloading failed.")
return False
if os.path.isfile(cudnn_lib): os.remove(cudnn_lib)
if os.path.isfile(denoiser_lib): os.remove(denoiser_lib)
print("Uncompressing... ")
    try:
        with zipfile.ZipFile(file_name, "r") as zip_ref:
            zip_ref.extractall(folder)
    except Exception:
        print("failed.")
        return False
print("Clean up...")
if os.path.isfile(file_name): os.remove(file_name)
print("All done.")
return True
|
class Solution:
def candy(self, ratings):
"""
:type ratings: List[int]
:rtype: int
"""
        # Every child starts with one candy.
        ret = [1 for _ in range(len(ratings))]
        # Left-to-right pass: a child rated higher than its left neighbour
        # must receive more candy than that neighbour.
        for i in range(len(ratings) - 1):
            if ratings[i] < ratings[i + 1] and ret[i] >= ret[i + 1]:
                ret[i + 1] = ret[i] + 1
        # Right-to-left pass: enforce the same rule against the right neighbour.
        for i in range(len(ratings) - 1, 0, -1):
            if ratings[i] < ratings[i - 1] and ret[i] >= ret[i - 1]:
                ret[i - 1] = ret[i] + 1
        return sum(ret)
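# Worked example: ratings [1, 0, 2] require candies [2, 1, 2], so
# Solution().candy([1, 0, 2]) == 5.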
|
# -*- coding: utf-8 -*-
from .specialized import (
BRAINSCut,
BRAINSROIAuto,
BRAINSConstellationDetector,
BRAINSCreateLabelMapFromProbabilityMaps,
BinaryMaskEditorBasedOnLandmarks,
BRAINSMultiSTAPLE,
BRAINSABC,
ESLR,
)
|
from uribuilder.uri import HIER_PART, URI, SCHEME, QUERY
def test_scheme():
assert SCHEME.match("http")
def test_query():
assert QUERY.match("a=1&b=1")
def test_hier_part_regex():
assert HIER_PART.match("//www.google.it/")
assert HIER_PART.match("//foo.com/blah_blah")
def test_valid_uri():
assert URI.match("http://foo.com:/blah_blah") # Test without port, only :
assert URI.match("http://foo.com/blah_blah")
assert URI.match("http://foo.com/blah_blah/")
assert URI.match("http://foo.com/blah_blah_(wikipedia)")
assert URI.match("http://foo.com/blah_blah_(wikipedia)_(again)")
assert URI.match("http://www.example.com/wpstyle/?p=364")
assert URI.match("https://www.example.com/foo/?bar=baz&inga=42&quux")
assert URI.match("http://✪df.ws/123")
assert URI.match("http://userid:password@example.com:8080")
assert URI.match("http://userid:password@example.com:8080/")
assert URI.match("http://userid@example.com")
assert URI.match("http://userid@example.com/")
assert URI.match("http://userid@example.com:8080")
assert URI.match("http://userid@example.com:8080/")
assert URI.match("http://userid:password@example.com")
assert URI.match("http://userid:password@example.com/")
assert URI.match("http://142.42.1.1/")
assert URI.match("http://142.42.1.1:8080/")
assert URI.match("http://➡.ws/䨹")
assert URI.match("http://⌘.ws")
    assert URI.match("http://⌘.ws/")
assert URI.match("http://foo.com/blah_(wikipedia)#cite-1")
assert URI.match("http://foo.com/blah_(wikipedia)_blah#cite-1")
assert URI.match("http://foo.com/unicode_(✪)_in_parens")
assert URI.match("http://foo.com/(something)?after=parens")
assert URI.match("http://☺.damowmow.com/")
assert URI.match("http://code.google.com/events/#&product=browser")
assert URI.match("http://j.mp")
assert URI.match("ftp://foo.bar/baz")
assert URI.match("http://foo.bar/?q=Test%20URL-encoded%20stuff")
assert URI.match("http://مثال.إختبار")
assert URI.match("http://例子.测试")
assert URI.match("http://उदाहरण.परीक्षा")
assert URI.match("http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com")
assert URI.match("http://1337.net")
assert URI.match("http://a.b-c.de")
assert URI.match("http://223.255.255.254")
assert URI.match("https://foo_bar.example.com/")
|
from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.CfgApprovingOfficers)
admin.site.register(models.CfgAttestingOfficers)
admin.site.register(models.CfgCertifyingOfficers)
admin.site.register(models.CfgGlobal)
admin.site.register(models.CfgOfficials)
admin.site.register(models.CfgPresidingOfficers)
admin.site.register(models.LegApprovedBy)
admin.site.register(models.LegAttendees)
admin.site.register(models.LegAttestedBy)
admin.site.register(models.LegCertifiedBy)
admin.site.register(models.LegPresidedOverBy)
admin.site.register(models.LegislativeInfo)
|
#from cqc.pythonLib import CQCConnection, qubit
#from lib import *
import json
def generate_python_file_from_node(folder_prefix, my_name, targets, n_receive):
with open(folder_prefix+my_name+".py", 'w') as f:
        f.write("from cqc.pythonLib import CQCConnection, qubit\n"
                "from lib import *\n"
                "with CQCConnection('" + my_name + "') as name_inst:\n"
                "    qbitdict = node_prepare(name_inst, " + str(targets) + ", " + str(n_receive) + ")\n"
                "    for target in qbitdict:\n"
                "        print('" + my_name + "', target, qbitdict[target].measure())")
return
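# For example (hypothetical node), generate_python_file_from_node("results/b/",
# "Alice", ["Bob"], 1) writes results/b/Alice.py containing:
#     from cqc.pythonLib import CQCConnection, qubit
#     from lib import *
#     with CQCConnection('Alice') as name_inst:
#         qbitdict = node_prepare(name_inst, ['Bob'], 1)
#         for target in qbitdict:
#             print('Alice', target, qbitdict[target].measure())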
def generate_files_from_network(id):
    """Generate the per-node python files and the run/start shell scripts
    for the network identified by `id`.
    Arguments:
        id {str} -- network identifier; the topology is read from
            results/<id>_network.json
    """
folder_prefix = "results/"+id+"/"
network_prefix = "results/"+id+"_"
    with open(network_prefix + 'network.json', 'r') as g:
        data = json.load(g)
names = []
for node in data:
my_name = data[node]['my_name']
names.append(my_name)
targets = data[node]['target']
n_receive = data[node]['receivers']
#generate_python_file_from_node(folder_prefix, my_name, targets, n_receive)
with open(folder_prefix+'run.sh', 'w') as f:
for name in names:
if name!=names[-1]:
f.write('python ../../run_node.py '+name+' '+id+' &\n')
else:
f.write('python ../../run_node.py '+name+' '+id+' \n')
with open(folder_prefix+'start.sh', 'w') as f:
f.write('simulaqron reset\nsimulaqron set backend qutip\nsimulaqron start --nodes ')
for name in names:
if name!=names[-1]:
f.write(name+',')
else:
f.write(name)
return
if __name__ == "__main__":
generate_files_from_network("b")
|
import random
import numpy as np
from blocklm_utils import ConstructBlockStrategy
from argparse import Namespace
# rng = random.Random()
# span_lengths = [2, 3, 4, 2, 3, 4]
# length = 100
#
# counts = np.array([0] * length)
# for _ in range(10000):
# rng.shuffle(span_lengths)
# spans = ConstructBlockStrategy.sample_spans(span_lengths, length, rng)
# for start, end in spans:
# counts[start: end] += 1
# print(counts)
def main():
args = Namespace()
args.seq_length = 10
args.eod_token = 0
strategy = ConstructBlockStrategy(args, None, bert_ratio=0.4)
counts = np.array([0] * 10)
for _ in range(10000):
spans = strategy.sample_span_in_document(np.array([1, 2, 3, 0, 4, 5, 6, 7, 9, 0], dtype=np.long), [1, 1],
random.Random())
for start, end in spans:
counts[start: end] += 1
print(counts)
|
'''
Docstring
if-elif-else syntax:
if condition:
    [code]
elif condition2:
    [code]
else:
    [code]
'''
# input --> str
# x = input('Enter a number: ')
# x = int(x)
x = int(input('Enter a number: '))
if x%2 == 0:
    print(x, 'is even.')
elif x%2 == 1:
    print('Remainder is 1')
else:
    print(x, 'is odd.')
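# Example runs: entering 4 prints "4 is even."; entering 7 prints
# "Remainder is 1" (for integer input the else branch is never reached).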
|
# coding=utf-8
"""Command line processing"""
import argparse
from glenoidplanefitting import __version__
from glenoidplanefitting.ui.glenoidplanefitting_demo import run_demo
def main(args=None):
"""Entry point for glenoidplanefitting application"""
parser = argparse.ArgumentParser(description='glenoidplanefitting')
    # Add positional arguments
parser.add_argument("model",
type=str,
help="Filename for vtk surface model")
    # Add optional arguments
parser.add_argument("-p", "--planes",
required=False,
type=str,
default="",
help="Landmark points file"
)
parser.add_argument("-f", "--fried_points",
required=False,
type=str,
default="",
                        help="Landmark points file (friedman)"
)
parser.add_argument("-t", "--vault_points",
required=False,
type=str,
default="",
help="Landmark points file (vault)"
)
parser.add_argument("-cf", "--corr_fried",
required=False,
type=str,
default="",
help="Landmark points file (corrected friedman)"
)
parser.add_argument("-o", "--output",
required=False,
type=str,
default="",
help="Write the fitted plane or lines to a file"
)
parser.add_argument("-v", "--visualise",
required=False,
default=False,
action='store_true',
help="Visualise the results"
)
parser.add_argument("-c", "--config",
required=False,
type=str,
default=None,
help="A configuration file"
)
version_string = __version__
friendly_version_string = version_string if version_string else 'unknown'
parser.add_argument(
"--version",
action='version',
version='glenoidplanefitting version ' + friendly_version_string)
args = parser.parse_args(args)
run_demo(args.model, args.planes, args.fried_points,
             args.vault_points, args.corr_fried, args.output,
args.visualise, args.config)
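# Entry-point guard (a sketch; the installed package may instead invoke main()
# through a console_scripts entry point):
if __name__ == "__main__":
    main()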
|
# (c) 2012-2018, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
import ast
import os
from galaxy import constants
from galaxy.importer import linters
from galaxy.importer import models
from galaxy.importer.utils import ast as ast_utils
from galaxy.importer.loaders import base
from galaxy.importer import exceptions as exc
class ModuleLoader(base.BaseLoader):
content_types = constants.ContentType.MODULE
linters = linters.Flake8Linter
def __init__(self, content_type, path, root, logger=None):
super().__init__(content_type, path, root, logger=logger)
self.documentation = None
self.metadata = None
def make_name(self):
return base.make_module_name(self.path)
def load(self):
self._parse_module()
description = ''
if self.documentation:
description = self.documentation.get('short_description', '')
readme = self._get_readme(os.path.dirname(self.path))
return models.Content(
name=self.name,
path=self.rel_path,
content_type=self.content_type,
readme=readme,
description=description,
metadata={
'ansible_metadata': self.metadata,
'documentation': self.documentation
}
)
def _parse_module(self):
with open(self.path) as fp:
code = fp.read()
try:
module = ast.parse(code) # type: ast.Module
assert isinstance(module, ast.Module), 'Module expected'
except SyntaxError as e:
raise exc.ContentLoadError(
"Syntax error while parsing module {0}: Line {1}:{2} {3}"
.format(os.path.basename(self.path),
e.lineno, e.offset, e.text))
for node in module.body:
if not isinstance(node, ast.Assign):
continue
name = node.targets[0].id
if name == 'ANSIBLE_METADATA':
                self.metadata = self._parse_metadata(node)
elif name == 'DOCUMENTATION':
try:
self.documentation = ast_utils.parse_ast_doc(node)
except ValueError as e:
self.log.warning('Cannot parse "DOCUMENTATION": {0}'
.format(e))
    def _parse_metadata(self, node):
        # type: (ast.Assign) -> dict
if not isinstance(node.value, ast.Dict):
self.log.warning('Cannot parse "ANSIBLE_METADATA" field, '
'dict expected')
return
return ast.literal_eval(node.value)
|
# --------------
# Code starts here
import numpy as np
# Adjacency matrix
adj_mat = np.array([[0,0,0,0,0,0,1/3,0],
[1/2,0,1/2,1/3,0,0,0,0],
[1/2,0,0,0,0,0,0,0],
[0,1,0,0,0,0,0,0],
[0,0,1/2,1/3,0,0,1/3,0],
[0,0,0,1/3,1/3,0,0,1/2],
[0,0,0,0,1/3,0,0,1/2],
[0,0,0,0,1/3,1,1/3,0]])
# Compute eigenvalues and eigenvectors
eigenvalues, eigenvectors = np.linalg.eig(adj_mat)
# Eigenvector for eigenvalue 1 (assumed to be the first column returned by
# np.linalg.eig), normalised to sum to 1
eigen_1 = abs(eigenvectors[:,0]/np.linalg.norm(eigenvectors[:,0],1))
# most important page
page = np.argmax(eigen_1)+1
print("Highest Ranked Page is:",page)
# Code ends here
# --------------
# Code starts here
# Initialize stationary vector I
init_I = np.array([1,0,0,0,0,0,0,0])
# Perform iterations for power method
for i in range(10):
init_I = np.dot(adj_mat, init_I)
init_I = init_I/np.linalg.norm(init_I,1)
power_page = np.argmax(init_I)+1
# Code ends here
# --------------
# Code starts here
# New adjacency matrix
new_adj_mat = np.array([[0,0,0,0,0,0,0,0],
[1/2,0,1/2,1/3,0,0,0,0],
[1/2,0,0,0,0,0,0,0],
[0,1,0,0,0,0,0,0],
[0,0,1/2,1/3,0,0,1/2,0],
[0,0,0,1/3,1/3,0,0,1/2],
[0,0,0,0,1/3,0,0,1/2],
[0,0,0,0,1/3,1,1/2,0]])
# Initialize stationary vector I
new_init_I = np.array([1,0,0,0,0,0,0,0])
# Perform iterations for power method
for i in range(10):
new_init_I = np.dot(new_adj_mat, new_init_I)
new_init_I = new_init_I/np.linalg.norm(new_init_I,1)
print(new_init_I)
# Code ends here
# --------------
# Alpha value
alpha = 0.85
# Code starts here
# Modified adjacency matrix (Google matrix): G = alpha*A + (1-alpha)/n * ones
n = len(new_adj_mat)
one = np.ones(new_adj_mat.shape)
G = alpha * new_adj_mat + (1-alpha) * 1/n * one
# Initialize stationary vector I
final_init_I = np.array([1,0,0,0,0,0,0,0])
# Perform iterations for power method
for i in range(1000):
final_init_I = np.dot(G,final_init_I)
final_init_I = final_init_I/np.linalg.norm(final_init_I,1)
print(final_init_I)
# Code ends here
|
#!/usr/bin/python2
# SPDX-License-Identifier: MIT
import os, sys, subprocess
d = sys.argv[1]
o = sys.argv[2]
l = os.listdir(d)
t = 0
for fn in l:
print str(t)
p = subprocess.check_output(['./f2fs_standalone', 'repro', 'f2fs.img', os.path.join(o, str(t) + '.img'), os.path.join(d, fn)])
t += 1
|
import shutil
def progressbar(
current_index, total_index, left_description=None, right_description=None
):
"""Simple progressbar function that prints an ASCII-based progressbar
in the terminal with the additional textual description on the left and
right-hand side. In programs where there is a long loop, this method can
be used to print a progressbar with progress text.
    Here is an example program using this library:
import time
from py_progress import progressbar
for i in range(100):
progressbar(i, 100, f"Loss: {i*2123}", f"Accuracy: {i*2}%")
time.sleep(.3)
    Running it prints output similar to:
~/.../experiments/progress-bar-test >>> python main.py
--> Loss: 210177 |||||||||||||||||||||||||||||||| Accuracy: 100 %
    Here the texts "Loss: 210177" and "Accuracy: 100%" are the descriptions passed to the function.
Args:
current_index (float): Current progress in the task
total_index (float): Total number of steps in the task
left_description (str, optional): Some description to put in the left side of
the progressbar. Defaults to None.
right_description (str, optional): Some description to put in the right side of
the progressbar. Defaults to None.
    Raises:
        ValueError: If input validation fails
    """
    if not isinstance(current_index, (int, float)) or current_index < 0:
raise ValueError(
"The parameter current_index is not valid, please provide a value that is\
greater than -1"
)
    if not isinstance(total_index, (int, float)) or total_index < 1:
raise ValueError(
"The parameter total_index is not valid, please provide a value that is\
greater than 0"
)
if left_description and not isinstance(left_description, str):
raise ValueError(
"The parameter left_description is not valid, please pass None or provide\
a string"
)
if right_description and not isinstance(right_description, str):
raise ValueError(
"The parameter right_description is not valid, please pass None or provide\
a string"
)
width, _ = shutil.get_terminal_size()
if left_description:
left_description = left_description + " "
else:
left_description = ""
if right_description:
right_description = " " + right_description
else:
right_description = ""
total_index_chars = len(left_description + right_description)
progressbar_text = ""
if width > total_index_chars:
percent = (width - total_index_chars) * current_index / total_index
progressbar_text = left_description + ("|" * int(percent)) + right_description
else:
progressbar_text = left_description + "|" + right_description
print("\r" + progressbar_text, end="")
|
from typing import List


class Solution:
    def shortestSuperstring(self, words: List[str]) -> str:
good_words = []
for word in words:
dominated = False
for word2 in words:
if word in word2 and word != word2:
dominated = True
if not dominated:
good_words.append(word)
good_words.append("")
N = len(good_words)
overlap = [[-1] * N for _ in range(N)]
for i in range(N):
for j in range(N):
overlap[i][j] = 0
for k in range(1, len(good_words[i])):
if good_words[j].startswith(good_words[i][k:]):
overlap[i][j] = len(good_words[i]) - k
break
        # Held-Karp style bitmask DP: the best string that starts with word i
        # and still has to place every word whose bit is set in mask.
        cache = {}
        def get_subset(i, mask):
            if mask == 0:
                return good_words[i]
            key = (i, mask)
            if key in cache:
                return cache[key]
            best_length = 300  # safe upper bound: the total input length is far smaller
            ans = "0" * 300
for j in range(N):
if(mask & (1 << j)) > 0:
c = overlap[i][j]
r = get_subset(j, mask ^ (1 << j))
current = good_words[i] + r[c:]
if len(current) < best_length:
best_length = len(current)
ans = current
cache[key] = ans
return ans
return get_subset(N - 1, (1 << (N - 1)) - 1)
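# Illustrative standalone driver (hypothetical, not part of the LeetCode
# harness). Any minimal-length string containing every word is a valid
# answer; for this input one such answer is "gctaagttcatgcatc".
if __name__ == "__main__":
    print(Solution().shortestSuperstring(["catg", "ctaagt", "gcta", "ttca", "atgcatc"]))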
|
import json
import xlrd
coordinates = open("coordinates.json", "r")
cDict = json.loads(coordinates.read())
coordinates.close()
# print(cDict["FP1"])
# Output file
jDict = {}
data = xlrd.open_workbook("Matrices-Grafos3D.xlsx")
sheet = data.sheet_by_index(0)
# cellData = sheet.cell_value(16,1)
# print(cellData)
margin = 0
# Get the node names
nodes = []
for i in range(19):
nodes.append(sheet.cell_value(44+i,0).upper())
# Add the nodes to the dictionary
jDict["1"] = {}
jDict["1"]["nodes"] = [{"id": x, "name": x} for x in nodes]
for i in jDict["1"]["nodes"]:
i["fx"] = cDict[i["name"]]["fx"]
i["fy"] = cDict[i["name"]]["fy"]
i["fz"] = cDict[i["name"]]["fz"]
# print(i)
jDict["1"]["links"] = []
for i in range(18):
column = 1 + i
for j in range(18):
row = 45 + j + margin
if(row <= 62):
if(sheet.cell_value(row, column) != 0):
                # Value, source, target
cellData = sheet.cell_value(row, column)
# print(cellData, sheet.cell_value(row,0), sheet.cell_value(43, column))
jDict["1"]["links"].append({"target": sheet.cell_value(43, column).upper(), "source": sheet.cell_value(row,0).upper(), "width": cellData})
# print()
margin+=1
# print(len(jDict["1"]["links"]))
# Write the output file
with open("data3.json", "w") as outfile:
json.dump(jDict, outfile)
|
from cmd import Cmd
import os
import json
from dbestclient.executor.executor import SqlExecutor
config = {
'warehousedir': 'dbestwarehouse',
'verbose': 'True',
'b_show_latency': 'True',
'backend_server': 'None',
'epsabs': 10.0,
'epsrel': 0.1,
'mesh_grid_num': 20,
'limit': 30,
'csv_split_char': ','
}
class DBEstPrompt(Cmd):
def __init__(self):
super(DBEstPrompt, self).__init__()
self.prompt = 'dbestclient> '
self.intro = "Welcome to DBEst: a model-based AQP engine! Type exit to exit!"
self.query = ""
# deal with configuration file
if os.path.exists('config.json'):
print("Configuration file loaded.")
self.config = json.load(open('config.json'))
else:
print("Configuration file config.json does not exist! use default values")
self.config = config
json.dump(self.config, open('config.json', 'w'))
self.verbose = self.config['verbose']
self.b_show_latency = self.config['b_show_latency']
# deal with warehouse
if os.path.exists(self.config['warehousedir']):
print("warehouse is initialized.")
else:
print("warehouse does not exists, so initialize one.")
os.mkdir(self.config['warehousedir'])
self.sqlExecutor = SqlExecutor(self.config)
# print the exit message.
def do_exit(self, inp):
'''exit the application.'''
print("DBEst closed successfully.")
return True
# process the query
def default(self, inp):
if ";" not in inp:
self.query = self.query + inp + " "
else:
self.query += inp.split(";")[0]
if self.config['verbose']:
print("Executing query >>> " + self.query + "...")
# query execution goes here
# -------------------------------------------->>
            # check if the query begins with 'bypass'; if so, use the backend server, otherwise let DBEst give a prediction
if self.query.lstrip()[0:6].lower() == 'bypass':
print("Bypass DBEst, use the backend server instead.")
# go to the backend server
else:
# sqlExecutor = SqlExecutor(config)
# print(self.query)
# self.query.replace(";",'')
self.sqlExecutor.execute(self.query)
# self.sqlExecutor.execute("create table mdl1(pm25 real, PRES real) from pm25.csv method uniform size 100")
# self.sqlExecutor.execute("select count(pm25 real) from mdl1 where PRES between 1000 and 1020")
# sqlExecutor.execute("select sum(pm25 real) from mdl where PRES between 1000 and 1020")
# sqlExecutor.execute("select avg(pm25 real) from mdl where PRES between 1000 and 1020")
# self.sqlExecutor.execute("create table ss(ss_list_price real, ss_wholesale_cost real) from store_sales.dat method uniform size 10000 group by ss_store_sk")
# self.sqlExecutor.execute("select count(ss_list_price) from ss where ss_wholesale_cost between 1 and 100 group by ss_store_sk")
# <<--------------------------------------------
            # reset the query buffer for the next incoming query
self.query = ""
# deal with KeyboardInterrupt caused by ctrl+c
def cmdloop(self, intro=None):
print(self.intro)
while True:
try:
super(DBEstPrompt, self).cmdloop(intro="")
break
except KeyboardInterrupt:
# self.do_exit("")
print("DBEst closed successfully.")
return True
do_EOF = do_exit
if __name__ == "__main__":
p = DBEstPrompt()
p.cmdloop()
|
from flask_restful import abort, Resource, reqparse
from flask import jsonify
from resources.models.User import User
from resources.models.UserArtist import UserArtist
from sqlalchemy.sql import func
class UserArtistListApi(Resource):
user = None
def __init__(self, **kwargs):
self.dbConn = kwargs['dbConn']
self.dbSession = kwargs['dbSession']
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('current', type=int)
self.reqparse.add_argument('limit', type=int)
self.reqparse.add_argument('skip', type=int)
self.reqparse.add_argument('filter', type=str)
self.reqparse.add_argument('sort', type=str)
self.reqparse.add_argument('order', type=str)
self.reqparse.add_argument('inc', type=str)
    def abort_if_user_doesnt_exist(self, userId):
        self.user = self.dbSession.query(User).filter(User.roadieId == userId).first()
        if not self.user:
            abort(404, message="User {} doesn't exist".format(userId))
def get(self, userId):
self.abort_if_user_doesnt_exist(userId)
args = self.reqparse.parse_args()
get_current = args.current
get_limit = args.limit or 25
get_skip = args.skip or 0
sort = args.sort or 'userartist.lastUpdated'
order = args.order or 'desc'
        includes = args.inc or 'artist,thumbnails'
if order != 'asc':
order = "-"
else:
order = ""
if get_current:
get_skip = (get_current * get_limit) - get_limit
if args.filter:
total_records = self.dbSession \
.query(func.count(UserArtist.id)) \
.filter(UserArtist.userId == self.user.id) \
.filter(UserArtist.artist.name.like("%" + args.filter + "%")) \
.scalar()
artists = self.dbSession \
.query(UserArtist) \
.filter(UserArtist.userId == self.user.id) \
.filter(UserArtist.artist.name.like("%" + args.filter + "%")) \
.order_by(order + sort) \
.slice(get_skip, get_skip + get_limit)
else:
q = self.dbSession \
.query(UserArtist) \
.filter(UserArtist.userId == self.user.id) \
.order_by(order + sort)
total_records_q = q.statement.with_only_columns([func.count(UserArtist.id)]).order_by(None).group_by(
UserArtist.userId)
total_records = q.session.execute(total_records_q).scalar()
artists = self.dbSession \
.query(UserArtist) \
.filter(UserArtist.userId == self.user.id) \
.order_by(order + sort) \
.slice(get_skip, get_skip + get_limit)
rows = []
if artists:
for track in artists:
rows.append(track.serialize(includes, self.dbConn))
return jsonify(rows=rows, current=args.current or 1, rowCount=len(rows), total=total_records, message="OK")
|
import pytest
from django.utils import timezone
pytestmark = [pytest.mark.django_db]
def test_order_is_shipped_even_if_it_is_not_paid(order, ship, course, user):
result = order.ship_without_payment()
assert result is True
ship.assert_called_once_with(course, to=user, order=order)
def test_order_is_marked_as_shipped_even_if_it_is_not_paid(order):
order.ship_without_payment()
order.refresh_from_db()
assert order.shipped is not None
assert order.paid is None
def test_does_not_ship_if_order_is_paid(order, ship):
order.paid = timezone.now()
result = order.ship_without_payment()
assert result is False
ship.assert_not_called()
|
from .context import solvers
from solvers import Alphametic
import unittest
class AlphameticTest(unittest.TestCase):
"""Tests for the Alphametic solver"""
def testDivision(self):
a = Alphametic()
a.AddDivision(dividend="FHPOHSKF", divisor="ITSSKR", quotient="HIF")
a.AddDivision(dividend="FHPOHS", divisor="ITSSKR", quotient="H", remainder="TPRPI")
a.AddProduct(result="FISSHK", initial_value="ITSSKR", multiplier="H")
a.AddSubtraction(result="TPRPI", initial_value="FHPOHS", reduction="FISSHK")
a.AddDivision(dividend="TPRPIK", divisor="ITSSKR", quotient="I", remainder="RRPCI")
a.AddProduct(result="ITSSKR", initial_value="ITSSKR", multiplier="I")
a.AddSubtraction(result="RRPCI", initial_value="TPRPIK", reduction="ITSSKR")
a.AddDivision(dividend="RRPCIF", divisor="ITSSKR", quotient="F", remainder="ITPCKP")
a.AddProduct(result="OHSSCF", initial_value="ITSSKR", multiplier="F")
a.AddSubtraction(result="ITPCKP", initial_value="RRPCIF", reduction="OHSSCF")
expectedSolution = {"P":0, "I": 1, "T": 2, "C": 3, "H": 4, "F": 5, "O": 6, "R": 7, "K": 8, "S": 9}
self.assertEqual(a.Solution(), expectedSolution)
def testKnownLetters(self):
a = Alphametic()
a.AddSum(initial_value="CEYLON", addition="BLACK", result="KETTLE")
a.AddKnownLetter("N", 8)
a.AddKnownLetter("Y", 2)
expectedSolution = {"A":3, "O":1, "B":9, "T":0, "L":7, "K":6, "E":4, "C":5, "Y":2, "N":8}
self.assertEqual(a.Solution(), expectedSolution)
|
def getUserName(chat):
    # Join first and last name with a space, skipping whichever is missing.
    parts = []
    if chat.first_name:
        parts.append(chat.first_name)
    if chat.last_name:
        parts.append(chat.last_name)
    return " ".join(parts)
|
x = """
AA_Page1_AB.html
A_Final_AB.css
All_Images_lab_3
All_Images_lab4_AB
All_Index_AB.html
A_MyTemplate_AB.html
A_page2_AB.html
A_page3_AB.html
bdog.jpg
blcat.jpg
business.jpg
cat.jpg
cocky.jpg
coliseum.jpg
<!DOCTYPE html>.html
elephant.jpg
family.jpg
geyser1.jpg
geyser1.jpg
geyser2.jpg
geyser2.jpg
grand2.jpg
grand.jpg
halloween1.jpg
hibiscus.jpg
hi.html
homefriend.JPG
horseshoe.jpg
hw3test.html
hydflower.jpg
index.md
Lab10aAB.html
Lab10bAB.html
Lab11aAB.html
Lab11bAB.html
Lab12AB.html
Lab13aAB.html
lab2AB.html
lab3Aab.html
lab3Bab.html
Lab4AAB.html
lab5AAB.html
lab5a.css
lab5B1ab.html
lab5B2ab.html
lab5Bab.css
Lab6ab.html
Lab7aAB.html
Lab7bAB.html
Lab888.html
Lab8AAB.html
Lab8B1AB.html
Lab8B2AB.html
Lab9aAB.html
Lab9bAB.html
LabB4AB.html
leaves.jpg
library.jpg
life.jpg
lily.jpg
me.JPG
oldfaithful.jpg
oldfaithful.jpg
oldf.jpg
paint1.jpg
paint2.jpg
paint3.jpg
paint4.jpg
pool1.jpg
pool1.jpg
pool2.jpg
pool2.jpg
pretty.jpg
ProjectFallp1.html
ProjectNavigation.html
ProjectTestPageAB.html
pumpkin.jpg
schoolfriend.jpg
usc.jpg
vote.jpg
yellow1.jpg
yellow1.jpg
yellow2.jpg
yellow2rezise.jpg
yellow3.jpg
yellow3.jpg
yellow4.jpg
yellow4.jpg
"""
x = x.split()
for i in x:
if not i.endswith("html"):
continue
print("<a href=\"%s\">%s</a><br>" % (i, i))
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import subprocess
import sys
import time
from airflow import configuration, LoggingMixin
NEED_KRB181_WORKAROUND = None
log = LoggingMixin().log
def renew_from_kt(principal, keytab):
# The config is specified in seconds. But we ask for that same amount in
# minutes to give ourselves a large renewal buffer.
renewal_lifetime = "%sm" % configuration.conf.getint('kerberos', 'reinit_frequency')
cmd_principal = principal or configuration.conf.get('kerberos', 'principal').replace(
"_HOST", socket.getfqdn()
)
cmdv = [
configuration.conf.get('kerberos', 'kinit_path'),
"-r", renewal_lifetime,
"-k", # host ticket
"-t", keytab, # specify keytab
"-c", configuration.conf.get('kerberos', 'ccache'), # specify credentials cache
cmd_principal
]
log.info("Reinitting kerberos from keytab: " + " ".join(cmdv))
subp = subprocess.Popen(cmdv,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True,
bufsize=-1,
universal_newlines=True)
subp.wait()
if subp.returncode != 0:
log.error("Couldn't reinit from keytab! `kinit' exited with %s.\n%s\n%s" % (
subp.returncode,
"\n".join(subp.stdout.readlines()),
"\n".join(subp.stderr.readlines())))
sys.exit(subp.returncode)
global NEED_KRB181_WORKAROUND
if NEED_KRB181_WORKAROUND is None:
NEED_KRB181_WORKAROUND = detect_conf_var()
if NEED_KRB181_WORKAROUND:
        # (From: HUE-640). Kerberos clocks have seconds-level granularity. Make sure we
# renew the ticket after the initial valid time.
time.sleep(1.5)
perform_krb181_workaround(principal)
def perform_krb181_workaround(principal):
cmdv = [configuration.conf.get('kerberos', 'kinit_path'),
"-c", configuration.conf.get('kerberos', 'ccache'),
"-R"] # Renew ticket_cache
log.info("Renewing kerberos ticket to work around kerberos 1.8.1: " +
" ".join(cmdv))
ret = subprocess.call(cmdv, close_fds=True)
if ret != 0:
principal = "%s/%s" % (principal or configuration.conf.get('kerberos', 'principal'),
socket.getfqdn())
        fmt_dict = dict(princ=principal,
                        ccache=configuration.conf.get('kerberos', 'ccache'))
log.error("Couldn't renew kerberos ticket in order to work around "
"Kerberos 1.8.1 issue. Please check that the ticket for "
"'%(princ)s' is still renewable:\n"
" $ kinit -f -c %(ccache)s\n"
"If the 'renew until' date is the same as the 'valid starting' "
"date, the ticket cannot be renewed. Please check your KDC "
"configuration, and the ticket renewal policy (maxrenewlife) "
"for the '%(princ)s' and `krbtgt' principals." % fmt_dict)
sys.exit(ret)
def detect_conf_var():
"""Return true if the ticket cache contains "conf" information as is found
in ticket caches of Kerberos 1.8.1 or later. This is incompatible with the
Sun Java Krb5LoginModule in Java6, so we need to take an action to work
around it.
"""
ticket_cache = configuration.conf.get('kerberos', 'ccache')
with open(ticket_cache, 'rb') as f:
# Note: this file is binary, so we check against a bytearray.
return b'X-CACHECONF:' in f.read()
def run(principal, keytab):
if not keytab:
log.debug("Keytab renewer not starting, no keytab configured")
sys.exit(0)
while True:
renew_from_kt(principal, keytab)
time.sleep(configuration.conf.getint('kerberos', 'reinit_frequency'))
|
# coding=utf-8
import schedule
import os
import time
import logging
import local_settings
from jobs import GenApi
logging.basicConfig(format='%(levelname)5s %(asctime)-15s #%(filename)-20s@%(funcName)-20s: %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
if __name__ == "__main__":
interval = int(os.getenv("SCHEDULE_INTERVAL"))
interval_unit = os.getenv("INTERVAL_UNIT")
logging.info(f"Scheduled Job will be running every {interval} {interval_unit}s")
gen_api = GenApi(
method=local_settings.METHOD,
endpoint=local_settings.ENDPOINT,
headers=local_settings.HEADER,
payload=local_settings.PAYLOAD,
params=local_settings.PARAMS
)
if interval_unit == "second":
schedule.every(interval).seconds.do(gen_api.run)
elif interval_unit == "hour":
schedule.every(interval).hours.do(gen_api.run)
elif interval_unit == "day":
schedule.every(interval).days.do(gen_api.run)
else:
schedule.every(1).seconds.do(gen_api.run)
    while True:
        schedule.run_pending()
        time.sleep(1)  # avoid a busy loop between schedule checks
|
from ftpack.correlation import corr_fft, conv_fft, conv, corr
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.backends.backend_pdf as pdf
N = 16
n_cont = 100
def y(x):
return np.sin(2 * x)
def z(x):
return np.cos(7 * x)
y_val = [y(2 * np.pi * i / N) for i in range(N)]
z_val = [z(2 * np.pi * i / N) for i in range(N)]
y_cont = [y(2 * np.pi * i / n_cont) for i in range(n_cont)]
z_cont = [z(2 * np.pi * i / n_cont) for i in range(n_cont)]
correlation = corr(y_val, z_val, N)
correlation_fft = corr_fft(y_val, z_val, N)
convolution = conv(y_val, z_val, N)
convolution_fft = conv_fft(y_val, z_val, N)
with pdf.PdfPages('results.pdf') as file:
plt.plot(np.arange(n_cont), y_cont)
plt.title("Y function")
plt.xlabel('x')
plt.ylabel('y=sin(2x)')
file.savefig()
plt.close()
plt.plot(np.arange(n_cont), z_cont)
plt.title("Z function")
plt.xlabel('x')
plt.ylabel('z=cos(7x)')
file.savefig()
plt.close()
plt.plot(np.arange(n_cont), y_cont)
plt.plot(np.arange(n_cont), z_cont)
plt.title("Y + Z function")
plt.xlabel('x')
plt.ylabel('y')
file.savefig()
plt.close()
    plt.stem(np.arange(N), correlation)
plt.title("Correlation")
plt.xlabel('x')
plt.ylabel('y')
file.savefig()
plt.close()
    plt.stem(np.arange(N), correlation_fft)
plt.title("Correlation FFT")
plt.xlabel('x')
plt.ylabel('y')
file.savefig()
plt.close()
    plt.stem(np.arange(N), convolution)
plt.title("Convolution")
plt.xlabel('x')
plt.ylabel('y')
file.savefig()
plt.close()
    plt.stem(np.arange(N), convolution_fft)
plt.title("Convolution FFT")
plt.xlabel('x')
plt.ylabel('y')
file.savefig()
plt.close()
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 5 11:05:03 2018
@author: ckwha
"""
import numpy as np
import matplotlib.pyplot as plt
def plot_image(i, predictions_array, true_label, img, class_names):
predictions_array, true_label, img = predictions_array[i], true_label[i], img[i]
plt.grid(False)
plt.xticks([])
plt.yticks([])
plt.imshow(img, cmap=plt.cm.binary)
predicted_label = np.argmax(predictions_array)
if predicted_label == true_label:
color = 'blue'
else:
color = 'red'
plt.xlabel("{} {:2.0f}% ({})".format(class_names[predicted_label],
100*np.max(predictions_array),
class_names[true_label]),
color=color)
def plot_value_array(i, predictions_array, true_label, class_names):
predictions_array, true_label = predictions_array[i], true_label[i]
plt.grid(False)
plt.xticks([])
plt.yticks([])
thisplot = plt.bar(range(10), predictions_array, color="#777777")
plt.ylim([0, 1])
predicted_label = np.argmax(predictions_array)
thisplot[predicted_label].set_color('red')
thisplot[true_label].set_color('blue')
|
# with pytest.warns((ExpectedWarning1, ExpectedWarning2, ...)) as record:
#     code under test
# record behaves like a list; its elements are the captured warning objects
import warnings
import pytest
def warn_message():
warnings.warn("user", UserWarning)
warnings.warn("runtime", RuntimeWarning)
def test_warns():
with pytest.warns((UserWarning, RuntimeWarning)) as records:
warn_message()
print(len(records))
print(records[0].message)
print(records[1].message)
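def test_warns_match():
    # pytest.warns also accepts a `match` regex (standard pytest API) to
    # additionally assert on the warning message text.
    with pytest.warns(UserWarning, match="user"):
        warn_message()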
|
# -*- coding: utf-8 -*-
# @Time : 2020/8/16 17:50
from __init__ import *
import close_flag
from ui2py.Manage import Ui_Manage
class ManageUI(QWidget):
def __init__(self,Userinfo,selfMerchant=0):
super().__init__()
self.ui = Ui_Manage()
self.ui.setupUi(self)
self.closeMerchant = selfMerchant
self.info = Userinfo
self.DB = Op_DB()
self.ui.btLostFreez.clicked.connect(self.LostFreeze)
self.ui.btUnSubscribe.clicked.connect(self.UnSubscrib)
self.ui.btModifyPwd.clicked.connect(self.ModifyPW)
self.show()
def closeActive(self):
if self.close():
close_flag.ManageUI_To_MerchantUI_Flag = True
self.closeMerchant.close()
self.StaUI = StartUI()
self.StaUI.ui.show()
def LostFreeze(self):
        Status = self.DB.Freeze((self.info[1], self.info[2], '', '冻结账户'))  # '冻结账户' ("freeze account") tag kept verbatim for the DB layer
        if Status[0] and Status[1]:
            QMessageBox.information(self, 'Success', "The account has been frozen, exiting...")
            self.closeActive()
        else:
            QMessageBox.information(self, 'Operation failed', "Failed to freeze the account")
    # Close (deregister) the account
def UnSubscrib(self):
Status = self.DB.delData(self.info)
if len(Status) == 2:
            if Status[0] == 'Cash Not Zero':
                QMessageBox.information(self, 'Operation failed', f"Your account still holds <b><font color=red>{Status[1]}</font></b> yuan; withdraw or transfer it before closing the account")
        else:
            if Status[0]:
                QMessageBox.information(self, 'Success', "The account has been closed")
                self.DB.connection.commit()
                self.closeActive()
            else:
                QMessageBox.information(self, 'Operation failed', "Failed to close the account")
self.DB.connection.rollback()
    # Change the password
def ModifyPW(self):
def ForgetPwd():
ForgetPwd_flag = False
while 1:
                CardID, okPressed = QInputDialog.getText(self, "Enter your card number / phone number", "CardID:", QLineEdit.Normal, "")
                if okPressed:
                    if re.compile(r"[0-9]*").fullmatch(CardID):  # check that the card number is all digits
                        Name, okPressed = QInputDialog.getText(self, "Enter your user name", "Name:", QLineEdit.Normal, "")
                        if okPressed:
                            # TODO: add email-based password verification
                            if self.DB.queue_data(CardID, Name):
                                QMessageBox.information(self, "Information",
                                                        "Your CardID is: <b>" + CardID + "</b>\nYour Name is:<b>" + Name + "</b>")
                                ForgetPwd_flag = True
                                break  # card number verified, leave the loop
                            else:
                                QMessageBox.critical(self, 'Incorrect details', "Please re-enter your card number and user name")
                                continue
                        else:
                            break  # user cancelled the name dialog
                    else:
                        QMessageBox.critical(self, 'Invalid card number', "The card number must be 11 digits")
                        continue
                else:
                    break  # user cancelled the CardID dialog
if ForgetPwd_flag:
while 1:
                Pwd1, okPressed1 = QInputDialog.getText(self, "Enter your password", "Password:", QLineEdit.Password, "")
                if okPressed1:
                    Pwd2_Confirm, okPressed2 = QInputDialog.getText(self, "Enter your password again", "Password:", QLineEdit.Password, "")
                    if okPressed2:
                        if Pwd1 == Pwd2_Confirm:
                            Status = self.DB.ModeifyInfo([CardID, Name, 'loginPassword', Pwd2_Confirm, '修改密码'])  # '修改密码' ("change password") tag kept verbatim for the DB layer
                            if Status[0] == 1 and Status[1] == 1:
                                QMessageBox.information(self, 'Success', "Password changed, please log in again with your new password...")
                                self.DB.ConFirm_DB(1, 0)
                                self.closeActive()
                                break
                            else:
                                QMessageBox.information(self, 'Operation failed', "Failed to change the password")
                                self.DB.ConFirm_DB(0)
                        else:
                            QMessageBox.information(self, 'Operation failed', "The two passwords do not match, please try again")
                            continue
                    else:
                        QMessageBox.information(self, 'Operation failed', "Failed to change the password")
                        break
                else:
                    QMessageBox.information(self, 'Operation failed', "Failed to change the password")
                    break
        reply = QMessageBox.question(self, u'Warning', u'Are you sure you want to change the password?', QMessageBox.Yes, QMessageBox.No)
        # QMessageBox.question(self, u'dialog title', u'dialog message', option1, option2)
if reply == QMessageBox.Yes:
ForgetPwd()
else:
return 0
from StartUI import StartUI
if __name__ == '__main__':
app = QApplication(sys.argv)
app.setWindowIcon(QIcon('../UI/Bank.ico'))
panel = ManageUI((0,62201206121,'www','','freeze'))
sys.exit(app.exec_())
|
# -*- coding: UTF-8 -*-
import os
import shutil
from ..action import Action
from ..decorators import side_effecting
from ..mixins import WalkAppierMixin
from .strategies import match_strategy
class Cleaner(Action, WalkAppierMixin):
"""Класс действия для удаления файлов.
Attributes:
Внешние:
days: Число, максимальное количество дней файлов в группе
max_files: Число, максимальное количество файлов в группе
exclusions: Список паттернов для исключения файлов из удаляемых
Список файлов для удаления определяется явными ограничениями и неявными.
Пример явных:
days, max_files.
Пример неявных:
при обработке наследника pgdump будут наследованы имена
баз данных, а по ним будут построены паттерны.
"""
def __init__(self, name):
super().__init__(name)
self.max_files = None
self.days = None
def start(self):
"""Входная точка действия.
Содержит поиск и вызов стратегий для
pgdump, move_bkp_period и archiver.
"""
try:
strategy = match_strategy(self.source)
except ValueError as exc:
self.logger.error(exc)
return self.continue_on_error
strategy.clean(
self,
self.source,
max_files=self.max_files,
days=self.days,
)
return True
@side_effecting
def remove(self, filepath):
"""Удалят определённый файл по filepath.
Args:
filepath (str): путь к файлу, директории или ссылке.
Returns:
True, если возникла ошибка.
"""
        error_occurred = False
        def onerror(function, path, excinfo):
            nonlocal error_occurred
            error_occurred = True
            self.logger.warning(
                'Recursive removal of %s failed in function %s: %s',
                path,
                function.__name__,
                excinfo,
            )
if os.path.isfile(filepath) or os.path.islink(filepath):
try:
os.remove(filepath)
except FileNotFoundError as exc:
                self.logger.info(
                    'File already removed %s, %s',
                    filepath,
                    exc,
                )
except OSError as exc:
                self.logger.error(
                    'Error removing file %s, %s',
                    filepath,
                    exc,
                )
else:
shutil.rmtree(
filepath,
ignore_errors=False,
onerror=onerror,
)
        return error_occurred
|
# Here we'll just set up some basic strings
# Strings go in quotes, but be careful about quotes in strings!
## You can see that you can create problems when you use a quote character in the
## string. There are a couple ways around this, including putting a backslash
## before the problematic quote, or using a different quote (")
print "This person's name will be defined very well!"
# Define your name
## Define strings called your_first_name and your_last_name (they can be
## anything; the values below are just examples)
your_first_name = 'Grace'
your_last_name = 'Hopper'
print your_first_name, your_last_name, 'was here!'
# Your strings are sequences (of characters)
## Use subscripting to pull out the second character of your last name.
## Subscripting with a single number grabs the item right after that location:
## +---+---+---+---+---+
## | H | e | l | p | A |
## +---+---+---+---+---+
## 0 1 2 3 4 5
## -5 -4 -3 -2 -1
## So, if s = 'HelpA', s[3] is 'p'. Don't forget that we start with 0 (no
## distance into the string).
print 'The second letter of my last name is', your_last_name[1]
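## Negative indices count from the end of the sequence: s[-1] is the last
## character. (This extra example goes beyond the original exercise.)
print 'The last letter of my last name is', your_last_name[-1]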
|
import numpy as np
import scipy as sp
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import pickle as pkl
def read_file(file_path):
with open(file_path, 'rb') as f:
data = pkl.load(f)
return data
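# Example usage (illustrative; assumes a pickle file exists at that path):
#   data = read_file("model.pkl")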
|
import os
import numpy as np
import itk
import typer
import json
from pathlib import Path
import segmantic
from segmantic.prepro.labels import (
load_tissue_list,
save_tissue_list,
build_tissue_mapping,
)
drcmr_labels_16 = [
"Background",
"Air_internal",
"Artery",
"Bone_cancellous",
"Bone_cortical",
"Cerebrospinal_fluid",
"Cerebrum_grey_matter",
"Cerebrum_white_matter",
"Eyes",
"Mucosa",
"Other_tissues",
"Rectus_muscles",
"SAT",
"Skin",
"Spinal_cord",
"Vein",
"Visual_nerve",
]
def premap(name: str):
return "Other_tissues" if "SAT" == name else name
def map_bone_fg_bg(name: str):
if name.startswith("Bone_"):
return "Bone"
elif name == "Background":
return "Background"
return "Head"
def map_bone_skin_air_fg_bg(name: str):
if name.startswith("Bone_"):
return "Bone"
elif name == "Air_internal":
return "Air_internal"
elif name == "Skin":
return "Skin"
elif name == "Background":
return "Background"
return "Head"
def map_vessels2other(name: str):
if name.startswith("Bone_"):
return "Bone"
elif "Vein" == name or "Artery" == name:
return "Other_tissues"
return premap(name)
def main(
input_dir: Path,
output_dir: Path,
input_tissues: Path,
input2output: str,
):
"""Map labels in all nifty files in specified directory
Args:
input_dir (Path): input_dir
output_dir (Path): output_dir
input_tissues (Path): output tissue list file
        input2output (str): mapping name [map_bone_fg_bg, map_bone_skin_air_fg_bg, map_vessels2other], or a json file mapping input to output tissues
The json file can be generated using:
with open(input2output, 'w') as f:
json.dump({ "Skull": "Bone", "Mandible": "Bone", "Fat": "Fat" }, f)
"""
# get input and output tissue lists
if input_tissues:
imap = load_tissue_list(input_tissues)
else:
imap = {n: i for i, n in enumerate(drcmr_labels_16)}
if os.path.exists(input2output):
with open(input2output) as f:
i2omap = json.load(f)
mapper = lambda n: i2omap[n]
    elif input2output in globals():
        mapper = globals()[str(input2output)]
else:
raise RuntimeError("Invalid mapping function specified")
# build index mapping from input to output
omap, i2o = build_tissue_mapping(imap, mapper)
os.makedirs(output_dir, exist_ok=True)
save_tissue_list(omap, output_dir / "labels_5.txt")
for input_file in input_dir.glob("*.nii.gz"):
image = segmantic.imread(input_file)
image_view = itk.array_view_from_image(image)
image_view[:] = i2o[image_view[:]]
assert len(np.unique(image)) == np.max(image) + 1
segmantic.imwrite(image, output_dir / input_file.name)
if __name__ == "__main__":
typer.run(main)
|
import datetime
import pytz
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.core import mail
from circles.models import Event, MailTemplate
User = get_user_model()
class EventTestCase(TestCase):
def setUp(self):
self.host = User(email="host@example.com", username="host@example.com")
self.host.save()
self.event = Event(
host=self.host, start=datetime.datetime(2020, 5, 1, 20, 0, tzinfo=pytz.UTC)
)
self.event.save()
def test_is_past(self):
event = Event(
host=self.host, start=datetime.datetime(1999, 5, 1, 20, 0, tzinfo=pytz.UTC)
)
self.assertTrue(event.is_past)
def test_is_full(self):
self.assertFalse(self.event.is_full)
# create 6 participants
for i in range(1, 7):
email = f"test{i}@example.com"
user = User(email=email, username=email)
user.save()
self.event.participants.add(user)
self.event.save()
self.assertTrue(self.event.is_full)
def test_ical(self):
self.assertEqual(
self.event.ical,
b"BEGIN:VCALENDAR\r\nBEGIN:VEVENT\r\nSUMMARY:Corona Circle\r\nDTSTART;VALUE=DATE-TIME:20200501T200000Z\r\nEND:VEVENT\r\nEND:VCALENDAR\r\n",
)
class EventQuerySetTestCase(TestCase):
def test_upcoming(self):
self.host = User(email="host@example.com", username="host@example.com")
self.host.save()
Event(
host=self.host, start=datetime.datetime(1999, 5, 1, 20, 0, tzinfo=pytz.UTC)
).save()
Event(
host=self.host, start=datetime.datetime(2222, 5, 1, 20, 0, tzinfo=pytz.UTC)
).save()
self.assertEqual(Event.objects.upcoming().count(), 1)
class MailTemplateTestCase(TestCase):
def setUp(self):
self.template = MailTemplate(
type="join_confirmation",
language_code="de",
subject_template="Event beigetreten",
body_template="{{ testvariable }}",
)
self.template.save()
def test_render(self):
mail = self.template.render(
{"testvariable": "This is a test"}, "max@example.com"
)
self.assertEqual(mail.body, "This is a test")
self.assertEqual(mail.subject, "Event beigetreten")
self.assertEqual(mail.to, ["max@example.com"])
def test_get_mail(self):
mail = MailTemplate.get_mail(
"join_confirmation",
"de",
{"testvariable": "This is a test"},
"max@example.com",
)
self.assertEqual(mail.subject, "Event beigetreten")
|
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from protests.models import Protest
from participant.models import Participant
# noinspection PyMethodMayBeStatic
class ParticipantCreateAPIView(APIView):
def post(self, request, pk):
protest = get_object_or_404(Protest, pk=pk)
protest.add_participant(request.user)
return Response({'created': True})
# noinspection PyMethodMayBeStatic
class ParticipantDeleteAPIView(APIView):
def post(self, request, pk):
qs = Protest.objects.filter(pk=pk).values_list('pk', flat=True)
if qs.exists():
Participant.objects.filter(protest_id=qs.get(), user=request.user).delete()
return Response({'deleted': True})
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import logging
import sys
from marshmallow import ValidationError
from pathlib import Path
from copy import deepcopy
from asyncio import get_running_loop, all_tasks, current_task, gather
from .message import Message, MessageSchema
from .writer import PrintWriter, FileWriter, ReportErrorWriter
schema = MessageSchema()
async def listen(queue):
from halfpipe.logging import setup as setuplogging
setuplogging(queue)
loop = get_running_loop()
printWriter = PrintWriter(levelno=25) # fmriprep's IMPORTANT
logWriter = FileWriter(levelno=logging.DEBUG)
errWriter = FileWriter(levelno=logging.WARNING)
reportErrWriter = ReportErrorWriter(levelno=logging.ERROR)
writers = [printWriter, logWriter, errWriter, reportErrWriter]
    for writer in writers:
        loop.create_task(writer.start())
subscribers = [writer.queue for writer in writers]
while True:
message = await loop.run_in_executor(None, queue.get)
# from pprint import pprint
# pprint(schema.dump(message))
if not isinstance(message, Message):
try:
message = schema.load(message)
except ValidationError:
continue # ignore invalid
assert isinstance(message, Message)
if message.type == "log":
for subscriber in subscribers:
messagecopy = deepcopy(message) # allow subscribers to modify message
await subscriber.put(messagecopy)
elif message.type == "set_workdir":
workdir = message.workdir
assert isinstance(workdir, (Path, str))
if not isinstance(workdir, Path):
workdir = Path(workdir)
workdir.mkdir(exist_ok=True, parents=True)
logWriter.filename = workdir / "log.txt"
logWriter.canWrite.set()
errWriter.filename = workdir / "err.txt"
errWriter.canWrite.set()
reportErrWriter.filename = workdir / "reports" / "reporterror.js"
reportErrWriter.canWrite.set()
elif message.type == "enable_verbose":
printWriter.levelno = logging.DEBUG
elif message.type == "enable_print":
printWriter.canWrite.set()
elif message.type == "disable_print":
printWriter.canWrite.clear()
elif message.type == "teardown":
# make sure that all writers have finished writing
await gather(*[subscriber.join() for subscriber in subscribers])
# then cancel all tasks
tasks = [t for t in all_tasks() if t is not current_task()]
            for task in tasks:
                task.cancel()
await gather(*tasks)
loop.stop()
break
queue.task_done()
|
'''3. Read a three-digit integer and check whether it is a narcissistic
number (e.g. 153 = 1*1*1 + 5*5*5 + 3*3*3),
then count how many narcissistic numbers there are.
'''
num = int(input("Please enter a three-digit integer: "))
n3 = num % 10
n2 = num // 10 % 10  # integer division: / would make the digits floats and break the check
n1 = num // 100 % 10
if num == n1*n1*n1 + n2*n2*n2 + n3*n3*n3:
    print("It is a narcissistic number")
else:
    print("It is not a narcissistic number")
con = 0
for i in range(100, 1000):
    i3 = i % 10
    i2 = i // 10 % 10
    i1 = i // 100 % 10
    if i == i1*i1*i1 + i2*i2*i2 + i3*i3*i3:
        con = con + 1
print(con)
|
from .qart import QArtist
|
# Copyright (C) 2017 Beijing Didi Infinity Technology and Development Co.,Ltd.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
''' tdnn kws model '''
import tensorflow as tf
from delta.models.base_model import RawModel
from delta.utils.register import registers
@registers.model.register
class TdnnKwsModel(RawModel):
''' main model '''
def __init__(self, config, name=None):
super().__init__(name=name)
self.cfg = config
self.train = None
#pylint: disable=arguments-differ
def call(self, features, **kwargs):
self.train = kwargs['training']
n_class = self.cfg['data']['task']['classes']['num']
return self.tdnn(features, n_class, self.train)
def tdnn(self, features, n_class, is_train):
'''
inp: (batch_size, window_len, feat_dim)
'''
inp = features['inputs']
kernel_size = self.cfg['model']['net']['kernel_size']
strides = self.cfg['model']['net']['strides']
num_layers = self.cfg['model']['net']['num_layers']
filters_num = inp.get_shape()[-1]
for i in range(num_layers):
output = tf.nn.relu(
tf.layers.conv1d(inp, filters_num, kernel_size, strides=strides))
output = tf.layers.batch_normalization(
output, training=is_train, name='bn%d' % i)
inp = output
dim = output.get_shape()[1] * output.get_shape()[2]
output = tf.reshape(output, [-1, dim])
logits = tf.layers.dense(output, n_class)
return logits
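# Shape sketch (illustrative): with conv1d's default 'valid' padding each
# layer shortens the time axis to floor((T - kernel_size)/strides) + 1, so
# the flattened `dim` above is (remaining time steps) * filters_num.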
|
import shutil

path = "/data/data/com.termux/files/home/TermuxOutput"
shutil.rmtree(path)
print("Thank you for using TermuxOutput. It has now been deleted.")
print("Please run <cd> to properly exit.")
|
# package.module
# module description
#
# Author: Allen Leis <allen.leis@gmail.com>
# Created: timestamp
#
# Copyright (C) 2017 Allen Leis
# For license information, see LICENSE
#
# ID: filename.py [] allen.leis@gmail.com $
"""
module description
"""
##########################################################################
# Imports
##########################################################################
from unittest import skip, TestCase
from mock import patch, Mock, MagicMock
from slackclient import SlackClient
from anemoi.bots.slack import SlackBot
from anemoi.exceptions import SlackBadResponse
##########################################################################
# Classes
##########################################################################
class TestSlackBot(TestCase):
@skip('Not Implemented')
def test_init(self):
obj = SlackBot('foo', 'foo', 'foo')
self.assertIsInstance(obj, SlackBot)
@skip('Not Implemented')
def test_filter_messages(self):
"""
tests ability to correctly filter for message events
"""
pass
@skip('Not Implemented')
def test_filter_messages_empty_data(self):
"""
        tests ability to process an empty list
"""
pass
@skip('Not Implemented')
def test_filter_messages_non_message(self):
"""
tests ability to filter out list of only non message events
"""
pass
def test_start_calls_listen(self):
"""
tests that start method should call listen
"""
obj = SlackBot('foo', 'foo', 'foo')
mocked_call = MagicMock()
obj.listen = mocked_call
obj.start()
mocked_call.assert_called_once()
def test_reply_raises_exc(self):
"""
tests that reply will raise SlackBadResponse if api_call returns bad response
"""
obj = SlackBot('foo', 'foo', 'foo')
mocked_call = MagicMock()
mocked_call.api_call.return_value = {'ok': False}
obj.client = mocked_call
with self.assertRaises(SlackBadResponse):
obj.reply('channel', 'content')
##########################################################################
# Execution
##########################################################################
if __name__ == '__main__':
pass
|
from forest.dsl import Node
from .post_order import PostOrderInterpreter
class NodeCounter(PostOrderInterpreter):
# */+/? concat |
# {"kleene":3, "copies":3, "posit":3, "option":3, "concat":2, "union":1}
def __init__(self):
super().__init__()
def eval_Input(self, v):
return 0
def eval_Number(self, v):
return 0
def eval_Bool(self, v):
return 0
def eval_Value(self, v):
return 0
def eval_RegexLit(self, v):
return 0
def eval_RangeLit(self, v):
return 1
    def eval_conj(self, node: Node, args) -> int:
        """Bool -> Bool, Bool;"""
        return 1 + sum(args)
    def eval_number(self, node, args) -> int:
        """Number -> Input;"""
        return 1 + sum(args)
    def eval_is_int(self, node, args) -> int:
        '''Bool -> Input;'''
        return 1 + sum(args)
    def eval_is_real(self, node, args) -> int:
        '''Bool -> Input;'''
        return 1 + sum(args)
    def eval_is_string(self, node, args) -> int:
        '''Bool -> Input;'''
        return 1 + sum(args)
    def eval_string(self, node, args) -> int:
        '''String -> Input;'''
        return 1 + sum(args)
    def eval_len(self, node, args) -> int:
        '''Number -> String;'''
        return 1 + sum(args)
    def eval_le(self, node, args) -> int:
        '''Bool -> Number, Number;'''
        return 1 + sum(args)
    def eval_ge(self, node, args) -> int:
        '''Bool -> Number, Number;'''
        return 1 + sum(args)
def eval_re(self, node, args):
return 1 + sum(args)
def eval_kleene(self, node: Node, args):
return 1 + sum(args)
def eval_option(self, node, args):
return 1 + sum(args)
def eval_posit(self, node, args):
return 1 + sum(args)
def eval_range(self, node, args):
return 1 + sum(args)
def eval_concat(self, node, args):
return 1 + sum(args)
def eval_union(self, node, args):
return 1 + sum(args)
def eval_match(self, node, args):
return sum(args)
|
# coding=utf-8
"""
Pytest config
"""
import os
import sys
import pytest
from click.testing import CliRunner
from mockito import unstub
@pytest.fixture(scope='session')
def miktex_path():
yield os.path.abspath(
os.path.join(
os.path.dirname(
os.path.dirname(__file__)
),
'miktex',
)
)
@pytest.fixture()
def runner():
runner_ = CliRunner()
yield runner_
# noinspection PyUnusedLocal
def pytest_configure(config):
"""Setup"""
assert config
sys.called_from_test = True
sys.path.append('.')
def pytest_unconfigure(config):
"""Tear down"""
# noinspection PyUnresolvedReferences
del sys.called_from_test
assert config
sys.path.remove('.')
@pytest.fixture(autouse=True)
def cleandir(request, tmpdir):
"""Provides a clean working dir"""
if 'nocleandir' in request.keywords:
yield
else:
current_dir = os.getcwd()
os.chdir(str(tmpdir))
yield os.getcwd()
os.chdir(current_dir)
@pytest.fixture(autouse=True)
def _unstub():
unstub()
yield
unstub()
@pytest.fixture(autouse=True)
def _reset_cache():
from edlm.external_tools.base import _find_patool
_find_patool.cache_clear()
yield
_find_patool.cache_clear()
@pytest.fixture(autouse=True)
def _setup_config():
from edlm.config import CFG
CFG.keep_temp_dir = False
CFG.debug = False
yield
def pytest_addoption(parser):
"""Add option for long tests"""
parser.addoption("--long", action="store_true",
help="run long tests")
def pytest_runtest_setup(item):
"""Skip long tests"""
long_marker = item.get_marker("long")
if long_marker is not None and not item.config.getoption('long'):
pytest.skip('skipping long tests')
|
# Generated with SMOP 0.41
from libsmop import *
# main.m
# This code is a basic example of one-class Matrix Factorization
# using AUC as a ranking metric and Bayesian Personalized Ranking
# as an optimization procedure (https://arxiv.org/abs/1205.2618).
#clear;
# TODO
# * Cross validation
set(0,'DefaultLineLineWidth',3)
set(groot,'DefaultAxesFontSize',30)
##
iter=20000000.0
# main.m:13
alpha=0.1
# main.m:14
lambda_=0.01
# main.m:15
sigma=0.1
# main.m:16
mu=0.0
# main.m:17
K=20
# main.m:18
reload=1
# main.m:19
subset=1000000.0
# main.m:20
tetr_ratio=0.2
# main.m:21
path='data/hashed.csv'
# main.m:22
week='W4'
# main.m:23
##
# M events
# N sources
# R_idx is an nx2 matrix holding the indices of positive signals
# names holds the string representation of sources
#[R_idx, M, N, names, ids] = gdelt(path, subset, reload);
## Create testing and training sets
tetr_split=3
# main.m:33
if tetr_split == 1:
Rall=sparse(R_idx(arange(),1),R_idx(arange(),2),1)
# main.m:37
    idx_te=zeros(N,1)  # zeros/sparse here are the MATLAB builtins emulated by libsmop
# main.m:38
# per source
for i in arange(1,N).reshape(-1):
idxs=find(R_idx(arange(),1) == i)
# main.m:42
rand_idx=randi(length(idxs),1)
# main.m:43
idx_te[i]=idxs(rand_idx)
# main.m:44
# Create index mask
# Test
test_mask=zeros(length(R_idx),1)
# main.m:49
test_mask[idx_te]=1
# main.m:50
test_mask=logical(test_mask)
# main.m:51
train_mask=logical_not(test_mask)
# main.m:53
R_idx_tr=R_idx(train_mask,arange())
# main.m:55
R_idx_te=R_idx(test_mask,arange())
# main.m:56
Rtr=sparse(R_idx_tr(arange(),1),R_idx_tr(arange(),2),1,N,M)
# main.m:58
Rte=sparse(R_idx_te(arange(),1),R_idx_te(arange(),2),1,N,M)
# main.m:59
else:
if tetr_split == 2:
Rall=sparse(R_idx(arange(),1),R_idx(arange(),2),1)
# main.m:62
datalen=length(R_idx)
# main.m:63
rp=randperm(datalen)
# main.m:64
pivot=ceil(datalen / 10)
# main.m:65
R_idx_te=R_idx(rp(arange(1,pivot)),arange())
# main.m:66
R_idx_tr=R_idx(rp(arange(pivot + 1,end())),arange())
# main.m:67
Rtr=sparse(R_idx_tr(arange(),1),R_idx_tr(arange(),2),1)
# main.m:70
else:
if tetr_split == 3:
R_idx_te,M_te,N_te,ids_test,names_test=gdelt_weekly_te('data/hashed',week,reload,dot(subset,tetr_ratio),nargout=5)
# main.m:73
R_idx_tr,M_tr,N_tr,ids_train,names_train=gdelt_weekly_tr('data/hashed',week,reload,dot(subset,(1 - tetr_ratio)),nargout=5)
# main.m:74
names=copy(names_train)
# main.m:76
M=max(M_te,M_tr)
# main.m:78
N=max(N_te,N_tr)
# main.m:79
R_idx=union(R_idx_te,R_idx_tr,'rows')
# main.m:81
Rall=sparse(R_idx(arange(),1),R_idx(arange(),2),1)
# main.m:82
idx_te=[]
# main.m:83
# Leave one out per source
for i in arange(1,N_te).reshape(-1):
idxs=find(R_idx_te(arange(),1) == i)
# main.m:87
if logical_not(isempty(idxs)):
rand_idx=randi(length(idxs),1)
# main.m:89
idx_te=concat([[idx_te],[idxs(rand_idx)]])
# main.m:90
# Keep only the heldout test samples
# Create a mask
not_idx_te=zeros(length(R_idx_te),1)
# main.m:96
not_idx_te[idx_te]=true
# main.m:97
not_idx_te=logical_not(not_idx_te)
# main.m:98
R_idx_tr=concat([[R_idx_tr],[R_idx_te(not_idx_te,arange())]])
# main.m:100
R_idx_te[not_idx_te,arange()]=[]
# main.m:101
# heldout samples
# Create the Source-Event interaction matrix
Rtr=sparse(R_idx_tr(arange(),1),R_idx_tr(arange(),2),1,N,M)
# main.m:105
Rte=sparse(R_idx_te(arange(),1),R_idx_te(arange(),2),1,N,M)
# main.m:106
# Sanity checks (nnz elements of Rall should be equal to the number of
# indices provided
if length(R_idx) != nnz(Rall) and tetr_split != 3:
disp('Problem in Rall.')
else:
if length(union(R_idx_te,R_idx_tr,'rows')) != nnz(Rall) and tetr_split == 3:
        disp('Problem in Rall (tetr==3)')
disp(length(R_idx_tr) + length(R_idx_te) - nnz(Rall))
## Run BPR
# Record auc values
auc_vals=zeros(iter / 100000,1)
# main.m:122
# Initialize low-rank matrices with random values
P=multiply(sigma,randn(N,K)) + mu
# main.m:125
Q=multiply(sigma,randn(K,M)) + mu
# main.m:126
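# BPR-SGD update sketch (cf. the paper linked in the header): for a sampled
# triple (user iu, positive item ii, negative item ji), take a gradient step
# on ln sigma(x_ui - x_uj) - lambda*||theta||^2. The z computed below equals
# sigma(-(x_ui - x_uj)), the common factor that scales all three gradients.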
for step in arange(1,iter).reshape(-1):
# Select a random positive example
i=randi(concat([1,length(R_idx_tr)]))
# main.m:131
iu=R_idx_tr(i,1)
# main.m:132
ii=R_idx_tr(i,2)
# main.m:133
ji=sample_neg(Rtr,iu)
# main.m:136
px=(dot(P(iu,arange()),(Q(arange(),ii) - Q(arange(),ji))))
# main.m:139
z=1 / (1 + exp(px))
# main.m:140
d=dot((Q(arange(),ii) - Q(arange(),ji)),z) - dot(lambda_,P(iu,arange()).T)
# main.m:143
P[iu,arange()]=P(iu,arange()) + dot(alpha,d.T)
# main.m:144
d=dot(P(iu,arange()),z) - dot(lambda_,Q(arange(),ii).T)
# main.m:147
Q[arange(),ii]=Q(arange(),ii) + dot(alpha,d.T)
# main.m:148
d=dot(- P(iu,arange()),z) - dot(lambda_,Q(arange(),ji).T)
# main.m:151
Q[arange(),ji]=Q(arange(),ji) + dot(alpha,d.T)
# main.m:152
if mod(step,100000) == 0:
# Compute the Area Under the Curve (AUC)
auc=0
# main.m:157
for i in arange(1,length(R_idx_te)).reshape(-1):
te_i=randi(concat([1,length(R_idx_te)]))
# main.m:159
te_iu=R_idx_te(i,1)
# main.m:160
te_ii=R_idx_te(i,2)
# main.m:161
te_ji=sample_neg(Rall,te_iu)
# main.m:162
sp=dot(P(te_iu,arange()),Q(arange(),te_ii))
# main.m:164
sn=dot(P(te_iu,arange()),Q(arange(),te_ji))
# main.m:165
if sp > sn:
auc=auc + 1
# main.m:167
else:
if sp == sn:
auc=auc + 0.5
# main.m:167
auc=auc / length(R_idx_te)
# main.m:169
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
auc_vals[step / 100000]=auc
# main.m:171
## t-SNE for users' latent factors - Computation
addpath('tSNE_matlab/')
plot_top_20=1
# main.m:180
plot_names=1
# main.m:181
plot_subset=arange(1,1000)
# main.m:182
# Get index of top 1K sources
__,I=sort(sum(Rall,2),1,'descend',nargout=2)
# main.m:185
subidx=I(plot_subset)
# main.m:186
##
# Run t-SNE on subset
ydata=tsne(P(subidx,arange()))
# main.m:190
##
#t-SNE for users' latent factors - Plot
# Get ids for known sources to show them in plot
if plot_top_20 == 1:
top_20_str=cellarray(['cnn.com','bbc.com','nytimes.com','foxnews.com','washingtonpost.com','usatoday.com','theguardian.com','dailymail.co.uk','chinadaily.com.cn','telegraph.co.uk','wsj.com','indiatimes.com','independent.co.uk','elpais.com','lemonde.fr','ft.com','bostonglobe.com','ap.org','afp.com','reuters.com','yahoo.com'])
# main.m:197
top_right_str=cellarray(['cbn.com','breitbart.com','spectator.org','foxnews.com','nypost.com','nationalreview.com','newsmax.com'])
# main.m:204
top_left_str=cellarray(['democracynow.org','huffingtonpost.com','motherjones.com','newrepublic.com','salon.com','time.com'])
# main.m:205
top_str=cellarray(['cbn.com','breitbart.com','spectator.org','foxnews.com','nypost.com','nationalreview.com','newsmax.com','democracynow.org','huffingtonpost.com','motherjones.com','newrepublic.com','salon.com','time.com','cnn.com','bbc.com','nytimes.com','foxnews.com','washingtonpost.com','usatoday.com','theguardian.com','dailymail.co.uk','chinadaily.com.cn','telegraph.co.uk','wsj.com','indiatimes.com','independent.co.uk','elpais.com','lemonde.fr','ft.com','bostonglobe.com','ap.org','afp.com','reuters.com','yahoo.com'])
# main.m:206
top_20_ids=zeros(length(top_20_str),1)
# main.m:213
top_right_ids=zeros(length(top_right_str),1)
# main.m:215
top_left_ids=zeros(length(top_left_str),1)
# main.m:216
top_ids=zeros(length(top_str),1)
# main.m:217
for ii in arange(1,length(top_20_str)).reshape(-1):
id_find=find(strcmp(top_20_str[ii],names_train))
# main.m:220
if length(id_find) > 0:
top_20_ids[ii]=id_find
# main.m:222
for ii in arange(1,length(top_right_str)).reshape(-1):
id_find=find(strcmp(top_right_str[ii],names_train))
# main.m:227
if length(id_find) > 0:
top_right_ids[ii]=id_find
# main.m:229
for ii in arange(1,length(top_left_str)).reshape(-1):
id_find=find(strcmp(top_left_str[ii],names_train))
# main.m:234
if length(id_find) > 0:
top_left_ids[ii]=id_find
# main.m:236
for ii in arange(1,length(top_str)).reshape(-1):
id_find=find(strcmp(top_str[ii],names_train))
# main.m:241
if length(id_find) > 0:
top_ids[ii]=id_find
# main.m:243
top_20_ids=top_20_ids(top_20_ids > 0)
# main.m:247
top_left_ids=top_left_ids(top_left_ids > 0)
# main.m:248
top_right_ids=top_right_ids(top_right_ids > 0)
# main.m:249
top_ids=top_ids(top_ids > 0)
# main.m:250
plot_idx=ismember(subidx,top_20_ids)
# main.m:252
# plot_idx = ismember(subidx,top_right_ids);
# plot_idx = ismember(subidx,top_left_ids);
# plot_idx = ismember(subidx,top_ids);
# ydata = tsne(P(plot_idx,:));
else:
plot_idx=copy(subidx)
# main.m:258
# Scatter plot t-SNE results
# figure;
# scatter(ydata(~plot_idx,1),ydata(~plot_idx,2));
# hold on;
# scatter(ydata(plot_idx,1),ydata(plot_idx,2), 300, 'r', 'filled');
figure
set(gca,'FontSize',30)
scatter(ydata(logical_not(plot_idx),1),ydata(logical_not(plot_idx),2),'MarkerEdgeColor',concat([0,0.5,0.5]),'MarkerFaceColor',concat([0,0.7,0.7]),'LineWidth',1.5)
hold('on')
scatter(ydata(plot_idx,1),ydata(plot_idx,2),300,'MarkerEdgeColor',concat([0.5,0,0]),'MarkerFaceColor',concat([0.9,0,0]),'LineWidth',1.5)
plot_names=2
# main.m:279
# Overlay names
if plot_names == 1:
dx=0.75
# main.m:283
dy=0.1
# main.m:283
t=text(ydata(plot_idx,1) + dx,ydata(plot_idx,2) + dy,names_train(subidx(plot_idx)))
# main.m:284
set(t,'FontSize',30)
set(t,'FontWeight','bold')
else:
dx=0.1
# main.m:288
dy=0.1
# main.m:288
t=text(ydata(arange(),1) + dx,ydata(arange(),2) + dy,names_train(subidx))
# main.m:289
set(t,'FontSize',30)
xlabel('PC1')
ylabel('PC2')
title('t-SNE projection sources latent space P')
hold('off')
## Plot Distance to Reuters + AP
# Reuters
reuters_id=find(strcmp('reuters.com',names_train))
# main.m:303
reuters=Rall(reuters_id,arange())
# main.m:304
reuters_idx=find(subidx == reuters_id)
# main.m:305
# Associated Press
ap_id=find(strcmp('ap.org',names_train))
# main.m:308
ap=Rall(ap_id,arange())
# main.m:309
ap_idx=find(subidx == ap_id)
# main.m:310
# Compute distance
dist=lambda id=None,source=None: log(nnz(logical_and(source,Rall(id,arange()))) / sum(source))
# main.m:313
recompute_dist=1
# main.m:315
if recompute_dist == 1:
dist_reuters=zeros(1,length(subidx))
# main.m:318
dist_ap=zeros(1,length(subidx))
# main.m:319
for i in arange(1,length(subidx)).reshape(-1):
source=Rall(subidx(i),arange())
# main.m:322
dist_reuters[i]=dist(reuters_id,source)
# main.m:323
dist_ap[i]=dist(ap_id,source)
# main.m:324
# Plot
figure
scatter(ydata(arange(),1),ydata(arange(),2),100,dist_ap,'filled')
hold('on')
# Scatter
scatter(ydata(reuters_idx,1),ydata(reuters_idx,2),300,'r','filled')
scatter(ydata(ap_idx,1),ydata(ap_idx,2),300,'r','filled')
# Overlay names
t1=text(ydata(reuters_idx,1) + dx,ydata(reuters_idx,2) + dy,'Reuters')
# main.m:338
t2=text(ydata(ap_idx,1) + dx,ydata(ap_idx,2) + dy,'Associated Press')
# main.m:339
set(t1,'FontSize',30)
set(t2,'FontSize',30)
set(t1,'FontWeight','bold')
set(t2,'FontWeight','bold')
colorbar
xlabel('PC1')
ylabel('PC2')
title('Log-Distance of each source to Associated Press')
figure
scatter(ydata(arange(),1),ydata(arange(),2),100,dist_reuters,'filled')
hold('on')
# Scatter
scatter(ydata(reuters_idx,1),ydata(reuters_idx,2),300,'r','filled')
scatter(ydata(ap_idx,1),ydata(ap_idx,2),300,'r','filled')
# Overlay names
t1=text(ydata(reuters_idx,1) + dx,ydata(reuters_idx,2) + dy,'Reuters')
# main.m:357
t2=text(ydata(ap_idx,1) + dx,ydata(ap_idx,2) + dy,'Associated Press')
# main.m:358
set(t1,'FontSize',30)
set(t2,'FontSize',30)
set(t1,'FontWeight','bold')
set(t2,'FontWeight','bold')
colorbar
xlabel('PC1')
ylabel('PC2')
title('Log-Distance of each source to Reuters')
## DBSCAN - Copyright (c) 2015, Yarpiz
addpath('DBSCAN/')
# Configure
epsilon=2
# main.m:373
MinPts=5
# main.m:374
X=copy(ydata)
# main.m:375
# Compute
db=DBSCAN(X,epsilon,MinPts)
# main.m:377
# Plot
PlotClusterinResult(X,db)
## Find recommendation ranking for holdout test event
# Manually curated top_20
for i in arange(1,length(top_20_ids)).reshape(-1):
search=top_20_ids(i)
# main.m:386
if search < N:
names_test(search)
# dot product : P(i) . Q
C=sum(bsxfun(times,P(search,arange()),Q.T),2)
# main.m:390
tr_idx=find(Rtr(search,arange()))
# main.m:392
C[tr_idx]=- 1000
# main.m:393
__,I_d=sort(C,1,'descend',nargout=2)
# main.m:395
holdout_event=find(Rte(search,arange()))
# main.m:397
holdout_event_id=holdout_event(1)
# main.m:398
global_id=ids_test(holdout_event_id) + 1
# main.m:399
# Find its ranking
ranking=find(I_d == holdout_event_id)
# main.m:401
## Find recommendation ranking for holdout test event
# top_20 from the dataset
auto_top_20_ids=subidx(arange(1,20))
# main.m:408
for i in arange(1,length(auto_top_20_ids)).reshape(-1):
search=auto_top_20_ids(i)
# main.m:411
if search < N:
names_test(search)
# dot product : P(i) . Q
C=sum(bsxfun(times,P(search,arange()),Q.T),2)
# main.m:415
tr_idx=find(Rtr(search,arange()))
# main.m:417
C[tr_idx]=- 1000
# main.m:418
__,I_d=sort(C,1,'descend',nargout=2)
# main.m:420
holdout_event=find(Rte(search,arange()))
# main.m:422
if numel(holdout_event) > 0:
holdout_event_id=holdout_event(1)
# main.m:424
global_id=ids_test(holdout_event_id) + 1
# main.m:425
# Find its ranking
ranking=find(I_d == holdout_event_id)
# main.m:427
else:
'No holdout found'
## Ranking - Jay
# Alternative implementation of the recommendation ranking system
# Choose a subset to rank
auto_top_20_ids=subidx(arange(1,end()))
# main.m:439
unique_te=unique(R_idx_te(arange(),2))
# main.m:440
res=[]
# main.m:442
for i in arange(1,length(R_idx_te)).reshape(-1):
te_ev=R_idx_te(i,arange())
# main.m:444
sp=dot(P(te_ev(1),arange()),Q(arange(),te_ev(2)))
# main.m:445
if any(te_ev(1) == auto_top_20_ids):
cnt=1
# main.m:448
for j in arange(1,length(unique_te)).reshape(-1):
if i == j:
continue
sn=dot(P(te_ev(1),arange()),Q(arange(),R_idx_te(j,2)))
# main.m:451
# all the events
if sn > sp:
cnt=cnt + 1
# main.m:454
# for another event than
# the one we had selected
res=concat([[res],[cnt]])
# main.m:458
## Ranking - Popularity
__,I=sort(sum(Rall.T,2),1,'descend',nargout=2)
# main.m:464
subidx_events=I(arange(1,1000))
# main.m:465
##
# Choose a subset to rank
auto_top_20_ids=subidx(arange(1,end()))
# main.m:468
unique_te=unique(R_idx_te(arange(),2))
# main.m:469
res_pop=[]
# main.m:471
for i in arange(1,length(R_idx_te)).reshape(-1):
te_ev=R_idx_te(i,arange())
# main.m:473
sp=find(I == te_ev(2))
# main.m:474
if any(te_ev(1) == auto_top_20_ids):
res_pop=concat([[res_pop],[sp]])
# main.m:476
## Popularity AUC
# Record auc values
auc_vals_pop=zeros(iter / 100000,1)
# main.m:483
for step in arange(1,iter).reshape(-1):
# Select a random positive example
i=randi(concat([1,length(R_idx_tr)]))
# main.m:488
iu=R_idx_tr(i,1)
# main.m:489
ii=R_idx_tr(i,2)
# main.m:490
ji=sample_neg(Rtr,iu)
# main.m:493
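    # sample_neg presumably draws an event that source iu has not covered in
    # Rtr, i.e. the negative half of the pairwise comparison.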
if mod(step,100000) == 0:
# Compute the Area Under the Curve (AUC)
auc=0
# main.m:498
for i in arange(1,length(R_idx_te)).reshape(-1):
te_i=randi(concat([1,length(R_idx_te)]))
# main.m:500
te_iu=R_idx_te(i,1)
# main.m:501
te_ii=R_idx_te(i,2)
# main.m:502
te_ji=sample_neg(Rall,te_iu)
# main.m:503
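            # Popularity baseline: score an event by how many sources covered
            # it in training (column sum of Rtr); the AUC counts how often the
            # held-out positive outscores the sampled negative.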
sp=sum(Rtr(arange(),te_ii))
# main.m:505
sn=sum(Rtr(arange(),te_ji))
# main.m:506
if sp > sn:
auc=auc + 1
# main.m:508
else:
if sp == sn:
auc=auc + 0.5
# main.m:508
auc=auc / length(R_idx_te)
# main.m:510
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
auc_vals_pop[step / 100000]=auc
# main.m:512
## Top 20 distribution
auto_top_20_ids=subidx(arange(1,50))
# main.m:519
unique_te=unique(R_idx_te(arange(),2))
# main.m:520
res_20=[]
# main.m:522
for i in arange(1,length(R_idx_te)).reshape(-1):
te_ev=R_idx_te(i,arange())
# main.m:524
sp=dot(P(te_ev(1),arange()),Q(arange(),te_ev(2)))
# main.m:525
if any(te_ev(1) == auto_top_20_ids):
cnt=1
# main.m:528
for j in arange(1,length(unique_te)).reshape(-1):
if i == j:
continue
sn=dot(P(te_ev(1),arange()),Q(arange(),R_idx_te(j,2)))
# main.m:531
# all the events
if sn > sp:
cnt=cnt + 1
# main.m:534
# for another event than
# the one we had selected
res_20=concat([[res_20],[cnt]])
# main.m:538
## Ranking plot
ranks=copy(res)
# main.m:544
figure
h=hist(res,500)
# main.m:546
scatter(arange(1,length(h),1),h,100,'MarkerEdgeColor',concat([0,0.5,0.5]),'MarkerFaceColor',concat([0,0.7,0.7]),'LineWidth',1.5)
hold('on')
h1=hist(res_pop,500)
# main.m:552
scatter(arange(1,length(h1),1),h1,100,'MarkerEdgeColor',concat([0.5,0,0.5]),'MarkerFaceColor',concat([0.5,0,0.7]),'LineWidth',1.5)
set(gca,'xscale','log')
set(gca,'yscale','log')
ylabel('Count')
xlabel('Ranking')
title('Event ranking distribution')
grid('on')
#set(gca, 'XTickLabel', num2str([1:1:50, 100:100:500, 1000:1000:2000]))
## Popularity plot
# popularity = f(#event)
a=sum(Rall.T,2)
# main.m:569
figure
h1=hist(a,unique(a))
# main.m:571
scatter(arange(1,length(h1),1),h1,100,'MarkerEdgeColor',concat([0,0.5,0.5]),'MarkerFaceColor',concat([0,0.5,0.7]),'LineWidth',1.5)
set(gca,'xscale','log')
set(gca,'yscale','log')
ylabel('# Events')
xlabel('Popularity')
title('Event popularity distribution')
grid('on')
##
a=sum(Rall,2)
# main.m:585
figure
h1=hist(a,unique(a))
# main.m:587
scatter(arange(1,length(h1),1),h1,100,'MarkerEdgeColor',concat([0.5,0,0.5]),'MarkerFaceColor',concat([0.5,0,0.7]),'LineWidth',1.5)
set(gca,'xscale','log')
set(gca,'yscale','log')
ylabel('# Sources')
xlabel('Events covered')
title('Source coverage distribution')
grid('on')
## Sanity check - Jay
# Check AUC score consistency
unique_te=unique(R_idx_te(arange(),2))
# main.m:602
auc=0
# main.m:604
for i in arange(1,length(R_idx_te)).reshape(-1):
te_ev=R_idx_te(i,arange())
# main.m:606
sp=dot(P(te_ev(1),arange()),Q(arange(),te_ev(2)))
# main.m:607
te_i=te_ev(2)
# main.m:609
while te_i == te_ev(2):
rand_i=randi(concat([1,length(R_idx_te)]))
# main.m:612
te_i=R_idx_te(rand_i,2)
# main.m:613
sn=dot(P(te_ev(1),arange()),Q(arange(),te_i))
# main.m:616
if sp > sn:
auc=auc + 1
# main.m:617
else:
if sp == sn:
auc=auc + 0.5
# main.m:617
auc=auc / length(R_idx_te)
# main.m:621
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
##
alphas=concat([0.001,0.01,0.05,0.1,0.5,1])
# main.m:625
Ks=concat([2,5,10,20,30,50])
# main.m:626
figure
colormap('default')
imagesc(heatmap)
ylabel('Learning rate (\alpha)')
xlabel('Latent factors (K)')
set(gca,'XTickLabel',Ks)
set(gca,'YTickLabel',alphas)
title('AUC (2e7 iterations, 91421 observations, 5970 holdout)')
colorbar
## CV
R_idx_te,M_te,N_te,ids_test,names_test=gdelt_weekly_te('data/hashed',reload,dot(subset,tetr_ratio),nargout=5)
# main.m:641
R_idx_tr,M_tr,N_tr,ids_train,names_train=gdelt_weekly_tr('data/hashed',reload,dot(subset,(1 - tetr_ratio)),nargout=5)
# main.m:642
names=copy(names_train)
# main.m:644
M=max(M_te,M_tr)
# main.m:646
N=max(N_te,N_tr)
# main.m:647
R_idx=union(R_idx_te,R_idx_tr,'rows')
# main.m:649
Rall=sparse(R_idx(arange(),1),R_idx(arange(),2),1)
# main.m:650
idx_te=[]
# main.m:651
# Leave one out per source
for i in arange(1,N_te).reshape(-1):
idxs=find(R_idx_te(arange(),1) == i)
# main.m:655
if logical_not(isempty(idxs)):
rand_idx=randi(length(idxs),1)
# main.m:657
idx_te=concat([[idx_te],[idxs(rand_idx)]])
# main.m:658
# Keep only the heldout test samples
# Create a mask
not_idx_te=zeros(length(R_idx_te),1)
# main.m:664
not_idx_te[idx_te]=true
# main.m:665
not_idx_te=logical_not(not_idx_te)
# main.m:666
R_idx_tr=concat([[R_idx_tr],[R_idx_te(not_idx_te,arange())]])
# main.m:668
R_idx_te[not_idx_te,arange()]=[]
# main.m:669
# heldout samples
# Create the Source-Event interaction matrix
Rtr=sparse(R_idx_tr(arange(),1),R_idx_tr(arange(),2),1,N,M)
# main.m:673
Rte=sparse(R_idx_te(arange(),1),R_idx_te(arange(),2),1,N,M)
# main.m:674
##
iter=10000000.0
# main.m:676
alpha=0.1
# main.m:677
lambdas=concat([[0.0001],[0.001],[0.01],[0.1],[0.5],[1]])
# main.m:679
Ks=concat([[2],[5],[10],[20],[30],[50]])
# main.m:680
auc_cv=zeros(length(lambdas),length(Ks))
# main.m:682
for cv_iter_lambdas in arange(1,length(lambdas)).reshape(-1):
for cv_iter_ks in arange(1,length(Ks)).reshape(-1):
# Record auc values
auc_vals=zeros(iter / 100000,1)
# main.m:688
P=multiply(sigma,randn(N,Ks(cv_iter_ks))) + mu
# main.m:691
Q=multiply(sigma,randn(Ks(cv_iter_ks),M)) + mu
# main.m:692
for step in arange(1,iter).reshape(-1):
# Select a random positive example
i=randi(concat([1,length(R_idx_tr)]))
# main.m:697
iu=R_idx_tr(i,1)
# main.m:698
ii=R_idx_tr(i,2)
# main.m:699
ji=sample_neg(Rtr,iu)
# main.m:702
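            # BPR-style SGD step: z = sigmoid(-P(iu,:)*(Q(:,ii)-Q(:,ji)))
            # weights the gradient updates of P(iu,:), Q(:,ii) and Q(:,ji),
            # each with an L2 penalty scaled by lambda.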
px=(dot(P(iu,arange()),(Q(arange(),ii) - Q(arange(),ji))))
# main.m:705
z=1 / (1 + exp(px))
# main.m:706
d=dot((Q(arange(),ii) - Q(arange(),ji)),z) - dot(lambdas(cv_iter_lambdas),P(iu,arange()).T)
# main.m:709
P[iu,arange()]=P(iu,arange()) + dot(alpha,d.T)
# main.m:710
d=dot(P(iu,arange()),z) - dot(lambdas(cv_iter_lambdas),Q(arange(),ii).T)
# main.m:713
Q[arange(),ii]=Q(arange(),ii) + dot(alpha,d.T)
# main.m:714
d=dot(- P(iu,arange()),z) - dot(lambdas(cv_iter_lambdas),Q(arange(),ji).T)
# main.m:717
Q[arange(),ji]=Q(arange(),ji) + dot(alpha,d.T)
# main.m:718
if mod(step,100000) == 0:
# Compute the Area Under the Curve (AUC)
auc=0
# main.m:723
for i in arange(1,length(R_idx_te)).reshape(-1):
te_i=randi(concat([1,length(R_idx_te)]))
# main.m:725
te_iu=R_idx_te(i,1)
# main.m:726
te_ii=R_idx_te(i,2)
# main.m:727
te_ji=sample_neg(Rall,te_iu)
# main.m:728
sp=dot(P(te_iu,arange()),Q(arange(),te_ii))
# main.m:730
sn=dot(P(te_iu,arange()),Q(arange(),te_ji))
# main.m:731
if sp > sn:
auc=auc + 1
# main.m:733
else:
if sp == sn:
auc=auc + 0.5
# main.m:733
auc=auc / length(R_idx_te)
# main.m:735
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
auc_vals[step / 100000]=auc
# main.m:737
auc_cv[cv_iter_lambdas,cv_iter_ks]=max(auc_vals)
# main.m:742
## CV heatmap
figure
colormap('default')
imagesc(auc_cv)
ylabel('Regularization (\lambda)')
xlabel('Latent factors (K)')
set(gca,'XTickLabel',Ks)
set(gca,'YTickLabel',lambdas)
title('AUC (2e7 iterations, 91421 observations, 5968 holdout -- \alpha = 0.1)')
colorbar
## AUC plot
figure
xs=concat([arange(1,20000000.0,100000.0)])
# main.m:761
ys=copy(auc_vals)
# main.m:762
plot(xs,ys,'LineWidth',2.5)
hold('on')
plot(xs,multiply(ones(1,length(xs)),max(auc_vals)),'--','LineWidth',2.5)
plot(xs,auc_vals_pop)
grid('on')
ylabel('AUC')
xlabel('Iteration')
legend('AUC','max(AUC)')
title('AUC (2e7 iterations, \alpha=0.1, \lambda=0.01, K=20)')
## KNN popularity
# Record auc values
auc_vals_knn=zeros(iter / 100000,1)
# main.m:776
# Compute the Area Under the Curve (AUC)
auc=0
# main.m:779
r=randi(concat([1,length(R_idx_te)]),1,1000)
# main.m:780
for i in arange(1,length(r)).reshape(-1):
i
te_i=randi(concat([1,length(R_idx_te)]))
# main.m:784
te_iu=R_idx_te(r(i),1)
# main.m:785
te_ii=R_idx_te(r(i),2)
# main.m:786
te_ji=sample_neg(Rall,te_iu)
# main.m:787
knn_k=10
# main.m:789
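    # k-NN baseline: find the knn_k nearest sources by Jaccard distance over
    # their training interaction rows (excluding te_iu itself), then score an
    # event by how many of those neighbours covered it.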
n,d=knnsearch(Rtr(concat([arange(1,te_iu - 1),arange(te_iu + 1,end())]),arange()),Rtr(te_iu,arange()),'k',knn_k,'distance','jaccard',nargout=2)
# main.m:791
sp=sum(Rtr(n,te_ii))
# main.m:793
sn=sum(Rtr(n,te_ji))
# main.m:794
if sp > sn:
auc=auc + 1
# main.m:797
else:
if sp == sn:
auc=auc + 0.5
# main.m:797
auc=auc / length(r)
# main.m:799
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
## Baseline run
# Get index of top 1K sources
__,I=sort(sum(Rall,2),1,'descend',nargout=2)
# main.m:805
subidx=I(plot_subset)
# main.m:806
# Record auc values
auc_vals_pop=zeros(iter / 100000,1)
# main.m:809
for step in arange(1,5).reshape(-1):
# Select a random positive example
i=randi(concat([1,length(R_idx_tr)]))
# main.m:814
iu=R_idx_tr(i,1)
# main.m:815
ii=R_idx_tr(i,2)
# main.m:816
ji=sample_neg(Rtr,iu)
# main.m:819
auc=0
# main.m:822
for i in arange(1,length(R_idx_te)).reshape(-1):
te_i=randi(concat([1,length(R_idx_te)]))
# main.m:824
te_iu=R_idx_te(i,1)
# main.m:825
te_ii=R_idx_te(i,2)
# main.m:826
te_ji=sample_neg(Rall,te_iu)
# main.m:827
sp=sum(Rtr(arange(),te_ii))
# main.m:829
sn=sum(Rtr(arange(),te_ji))
# main.m:830
if sp > sn:
auc=auc + 1
# main.m:832
else:
if sp == sn:
auc=auc + 0.5
# main.m:832
auc=auc / length(R_idx_te)
# main.m:834
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
auc_vals_pop[step]=auc
# main.m:836
# Record auc values
auc_vals_knn=zeros(iter / 100000,1)
# main.m:842
# Compute the Area Under the Curve (AUC)
auc=0
# main.m:845
r=randi(concat([1,length(R_idx_te)]),1,50)
# main.m:846
for i in arange(1,length(r)).reshape(-1):
i
te_i=randi(concat([1,length(R_idx_te)]))
# main.m:850
te_iu=R_idx_te(r(i),1)
# main.m:851
te_ii=R_idx_te(r(i),2)
# main.m:852
te_ji=sample_neg(Rall,te_iu)
# main.m:853
knn_k=10
# main.m:855
n,d=knnsearch(Rtr(concat([arange(1,te_iu - 1),arange(te_iu + 1,end())]),arange()),Rtr(te_iu,arange()),'k',knn_k,'distance','jaccard',nargout=2)
# main.m:857
sp=sum(Rtr(n,te_ii))
# main.m:859
sn=sum(Rtr(n,te_ji))
# main.m:860
if sp > sn:
auc=auc + 1
# main.m:863
else:
if sp == sn:
auc=auc + 0.5
# main.m:863
auc=auc / length(r)
# main.m:865
fprintf(concat(['AUC test: ',num2str(auc),'\n']))
auc_vals_knn=copy(auc)
# main.m:867
##
load('../../clustering/W1/sources.csv')
ydata=concat([sources(arange(),3),sources(arange(),4)])
# main.m:872
subidx=sources(arange(),1)
# main.m:874
# Scatter plot t-SNE results
# figure;
# scatter(ydata(~plot_idx,1),ydata(~plot_idx,2));
# hold on;
# scatter(ydata(plot_idx,1),ydata(plot_idx,2), 300, 'r', 'filled');
figure
set(gca,'FontSize',30)
scatter(ydata(arange(),1),ydata(arange(),2),'MarkerEdgeColor',concat([0,0.5,0.5]),'MarkerFaceColor',concat([0,0.7,0.7]),'LineWidth',1.5)
plot_names=2
# main.m:889
# Overlay names
if plot_names == 1:
dx=0.75
# main.m:893
dy=0.1
# main.m:893
t=text(ydata(plot_idx,1) + dx,ydata(plot_idx,2) + dy,names_train(subidx(plot_idx)))
# main.m:894
set(t,'FontSize',30)
set(t,'FontWeight','bold')
else:
dx=0.1
# main.m:898
dy=0.1
# main.m:898
t=text(ydata(arange(),1) + dx,ydata(arange(),2) + dy,names_train(subidx))
# main.m:899
set(t,'FontSize',30)
xlabel('PC1')
ylabel('PC2')
title('t-SNE projection sources latent space P')
hold('off')
|
'''Unit test module for User class'''
import unittest
from user_data import User
class TestUser(unittest.TestCase):
'''
Test class that defines test cases for the user class behaviours.
Args:
unittest.TestCase: TestCase class that helps in creating test cases
'''
def setUp(self):
'''
Set up method to run before each test cases.
'''
self.new_account = User("qwerty12345")
def test_init(self):
'''
Test case to test if the object is initialized properly.
'''
self.assertEqual(self.new_account.master_password, "qwerty12345")
def tearDown(self):
'''
Clear after each test case runs
'''
User.user_accounts = []
def test_save_account(self):
'''
Test case to test if the account object is saved into the user_accounts list.
'''
self.new_account.save_account()
self.assertEqual(len(User.user_accounts), 1)
if __name__ == '__main__':
unittest.main()
|
'''
Experiment: Network (NTP) clock
Version: v1.0
Date: 2020.7
Author: 01Studio
'''
# Import required modules
from machine import Pin, I2C, RTC, Timer
from ssd1306 import SSD1306_I2C
import ntptime, network, time
# Display-string lists for the weekday and the time (hour/minute/second)
week = ['Mon', 'Tues', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun']
time_list = ['', '', '']
# Initialize all related objects
i2c = I2C(sda=Pin(13), scl=Pin(14))  # I2C init: sda --> 13, scl --> 14
oled = SSD1306_I2C(128, 64, i2c, addr=0x3c)
rtc = RTC()
# WiFi connection function; updates the time after a successful connection
def WIFI_Connect():
    WIFI_LED = Pin(2, Pin.OUT)           # initialize the WiFi indicator LED
    wlan = network.WLAN(network.STA_IF)  # station (STA) mode
    wlan.active(True)                    # activate the interface
    start_time = time.time()             # record start time for timeout detection
    if not wlan.isconnected():
        print('connecting to network...')
        wlan.connect('01Studio', '88888888')  # WiFi SSID and password
        while not wlan.isconnected():
            # Blink the LED while connecting
            WIFI_LED.value(1)
            time.sleep_ms(300)
            WIFI_LED.value(0)
            time.sleep_ms(300)
            # Timeout: give up if not connected within 15 seconds
            if time.time() - start_time > 15:
                print('WIFI Connected Timeout!')
                break
    if wlan.isconnected():
        # LED steady on
        WIFI_LED.value(1)
        # Print network information to the serial console
        print('network information:', wlan.ifconfig())
        # Show the data on the OLED
        oled.fill(0)  # clear screen to black
        oled.text('IP/Subnet/GW:', 0, 0)
        oled.text(wlan.ifconfig()[0], 0, 20)
        oled.text(wlan.ifconfig()[1], 0, 38)
        oled.text(wlan.ifconfig()[2], 0, 56)
        oled.show()
        for i in range(5):  # try to fetch the NTP time at most 5 times
            try:
                ntptime.settime()
                print(rtc.datetime())
                time.sleep_ms(500)
                return None
            except:
                print("Can not get time!")
def RTC_Run(tim):
    datetime = list(rtc.datetime())  # get the current (UTC) time
    # Convert to Beijing time (UTC+8); when the hour rolls past midnight the
    # date and weekday must be adjusted too. February and leap years are not
    # handled, matching the scope of the original sketch.
    datetime[4] = datetime[4] + 8
    if datetime[4] >= 24:
        datetime[4] = datetime[4] % 24
        if datetime[1] in [1, 3, 5, 7, 8, 10, 12]:  # 31-day months
            datetime[2] = datetime[2] % 31 + 1
        else:
            datetime[2] = datetime[2] % 30 + 1
        datetime[3] = (datetime[3] + 1) % 7  # weekday index is 0..6
    oled.fill(0)  # clear screen to a black background
    oled.text('01Studio', 0, 0)    # first line: 01Studio
    oled.text('NTP Clock', 0, 15)  # second line: experiment name
    # Show the date; strings can be concatenated directly with "+"
    oled.text(str(datetime[0]) + '-' + str(datetime[1]) + '-' + str(datetime[2]) + ' ' + week[datetime[3]], 0, 40)
    # For hour/minute/second values below 10, prepend a "0" so the time is
    # displayed with a fixed width
    for i in range(4, 7):
        if datetime[i] < 10:
            time_list[i - 4] = "0"
        else:
            time_list[i - 4] = ""
    # Show the time
    oled.text(time_list[0] + str(datetime[4]) + ':' + time_list[1] + str(datetime[5]) + ':' + time_list[2] + str(datetime[6]), 0, 55)
    oled.show()
# Run the WiFi connection function
WIFI_Connect()
# Start an RTOS timer that refreshes the clock display
tim = Timer(-1)
tim.init(period=300, mode=Timer.PERIODIC, callback=RTC_Run)  # 300 ms period
|
# Captured output: calling <function report at 0x7fd128ff1bf8> 22.5
# A function call always needs parentheses, otherwise you get the memory
# address of the function object. So, if we wanted to call the function named
# report, and give it the value 22.5 to report on, we could have our function
# call as follows (report is defined here so the snippet runs standalone):
def report(temperature):
    print("the reported value is", temperature)

print("calling", report, 22.5)  # shows the function object; does not call it
report(22.5)                    # actually calls the function
|
# Install selenium with pip
"""
Command: pip install -U selenium
Website: https://pypi.org/project/selenium/
"""
|
"""Player apps"""
#Django
from django.apps import AppConfig
class PlayerAppConfig(AppConfig):
"""Player app config
The player contains the reference to the objects models like artists, songs
and albums.
"""
name = 'ceol.player'
verbose_name = 'Player'
|
from nsl.passes.ValidateSwizzle import ValidateSwizzleMask
import pytest
class TestValidateSwizzlePass:
def testMixFail(self):
with pytest.raises(Exception):
ValidateSwizzleMask('xyrg')
def testRepeatedWorks(self):
ValidateSwizzleMask('rrrr')
|
import unittest
from unittest import TestCase
from unittest.mock import patch
from featurehub_sdk.client_context import ClientContext
from featurehub_sdk.fh_state_base_holder import FeatureStateBaseHolder
class ClientContextTest(TestCase):
@patch('featurehub_repository.FeatureHubRepository')
@patch('edge_service.EdgeService')
def test_get_number(self, mock_repo, edge_service):
var = FeatureStateBaseHolder({'id': '123', 'key': 'FEATURE_TITLE_TO_UPPERCASE',
'l': True, 'version': 1, 'type': 'NUMBER', 'value': 3, 'strategies': []}, )
mock_repo.feature.return_value = var
client_context = ClientContext(mock_repo, edge_service)
result = client_context.get_number("bla")
self.assertEqual(result, 3)
@patch('featurehub_repository.FeatureHubRepository')
@patch('edge_service.EdgeService')
def test_get_number_when_not_number(self, mock_repo, mock_edge):
var = FeatureStateBaseHolder({'id': '123', 'key': 'FEATURE_TITLE_TO_UPPERCASE',
'l': True, 'version': 1, 'type': 'BOOLEAN', 'value': 'true', 'strategies': []}, )
mock_repo.feature.return_value = var
client_context = ClientContext(mock_repo, mock_edge)
result = client_context.get_number("bla")
self.assertEqual(result, None)
@patch('featurehub_repository.FeatureHubRepository')
@patch('edge_service.EdgeService')
def test_get_number_when_feature_is_none(self, mock_repo, mock_edge):
mock_repo.feature.return_value = None
client_context = ClientContext(mock_repo, mock_edge)
result = client_context.get_number("bla")
self.assertEqual(result, None)
if __name__ == '__main__':
unittest.main()
|
"""
link: https://leetcode-cn.com/problems/robot-room-cleaner
problem: 模拟扫地机器人的行为,只通过四个 API 接口情况下遍历整个房间
solution: DFS。首先抛开题目,遍历01矩阵只需要做dfs。本质上这道题是一致的,不过遍历矩阵可以通过栈直接回溯变量,本题多了一个全局的robot,
在回溯时令robot也回到上一状态即可。
"""
# """
# This is the robot's control interface.
# You should not implement it, or speculate about its implementation
# """
#class Robot:
# def move(self):
# """
# Returns true if the cell in front is open and robot moves into the cell.
# Returns false if the cell in front is blocked and robot stays in the current cell.
# :rtype bool
# """
#
# def turnLeft(self):
# """
# Robot will stay in the same cell after calling turnLeft/turnRight.
# Each turn will be 90 degrees.
# :rtype void
# """
#
# def turnRight(self):
# """
# Robot will stay in the same cell after calling turnLeft/turnRight.
# Each turn will be 90 degrees.
# :rtype void
# """
#
# def clean(self):
# """
# Clean the current cell.
# :rtype void
# """
class Solution:
def cleanRoom(self, robot):
def go_back():
robot.turnRight()
robot.turnRight()
robot.move()
robot.turnRight()
robot.turnRight()
move_list = [(-1, 0), (0, 1), (1, 0), (0, -1)]
visit = set()
def dfs(x, y, direction):
nonlocal visit
visit.add((x, y))
robot.clean()
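            # Try the four directions in clockwise order relative to the
            # current heading; each extra right turn keeps the robot's real
            # orientation in sync with the (direction + d) index.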
for d in range(4):
if d != 0:
robot.turnRight()
ii, jj = move_list[(direction + d) % 4]
i, j = x + ii, y + jj
if (i, j) in visit:
continue
if not robot.move():
visit.add((i, j))
continue
dfs(i, j, (direction + d) % 4)
robot.turnRight()
go_back()
dfs(0, 0, 0)
|
"""
ConfigureCommand class for SubarrayNodeLow.
"""
# Standard Python imports
import json
# Third party imports
# Tango imports
import tango
from tango import DevFailed
# Additional import
from ska.base.commands import ResultCode
from ska.base import SKASubarray
from tmc.common.tango_client import TangoClient
from tmc.common.tango_server_helper import TangoServerHelper
from . import const
class Configure(SKASubarray.ConfigureCommand):
"""
A class for SubarrayNodeLow's Configure() command.
Configures the resources assigned to the Mccs Subarray Leaf Node.
"""
def do(self, argin):
"""
Method to invoke Configure command.
:param argin: DevString.
JSON string example is:
{"interface":"https://schema.skao.int/ska-low-tmc-configure/2.0","transaction_id":"txn-....-00001","mccs":{"stations":[{"station_id":1},{"station_id":2}],"subarray_beams":[{"subarray_beam_id":1,"station_ids":[1,2],"update_rate":0.0,"channels":[[0,8,1,1],[8,8,2,1],[24,16,2,1]],"antenna_weights":[1.0,1.0,1.0],"phase_centre":[0.0,0.0],"target":{"reference_frame":"HORIZON","target_name":"DriftScan","az":180.0,"el":45.0}}]},"sdp":{},"tmc":{"scan_duration":10.0}}
return:
A tuple containing a return code and a string message indicating status.
The message is for information purpose only.
rtype:
(ReturnCode, str)
raises:
JSONDecodeError if input argument json string contains invalid value
DevFailed if the command execution is not successful.
"""
device_data = self.target
device_data.is_scan_completed = False
device_data.is_release_resources = False
device_data.is_abort_command_executed = False
device_data.is_obsreset_command_executed = False
device_data.is_restart_command_executed = False
self.logger.info(const.STR_CONFIGURE_CMD_INVOKED_SA_LOW)
log_msg = f"{const.STR_CONFIGURE_IP_ARG}{argin}"
self.logger.info(log_msg)
self.this_server = TangoServerHelper.get_instance()
self.this_server.write_attr("activityMessage", const.STR_CONFIGURE_CMD_INVOKED_SA_LOW, False)
try:
scan_configuration = json.loads(argin)
except json.JSONDecodeError as jerror:
log_message = f"{const.ERR_INVALID_JSON}{jerror}"
self.logger.error(log_message)
self.this_server.write_attr("activityMessage", log_message, False)
tango.Except.throw_exception(
const.STR_CMD_FAILED,
log_message,
const.STR_CONFIGURE_EXEC,
tango.ErrSeverity.ERR,
)
tmc_configure = scan_configuration["tmc"]
device_data.scan_duration = int(tmc_configure["scan_duration"])
self._create_mccs_cmd_data(scan_configuration)
message = "Configure command invoked"
self.logger.info(message)
return (ResultCode.STARTED, message)
def _create_mccs_cmd_data(self, json_argument):
mccs_value = json_argument["mccs"]
json_argument["interface"] = "https://schema.skao.int/ska-low-mccs-configure/1.0"
if 'transaction_id' in json_argument:
del json_argument["transaction_id"]
if 'sdp' in json_argument:
del json_argument["sdp"]
if 'tmc' in json_argument:
del json_argument["tmc"]
if 'mccs' in json_argument:
del json_argument["mccs"]
json_argument.update(mccs_value)
        input_to_mccs = json.dumps(json_argument)
self._configure_mccs_subarray("Configure", input_to_mccs)
def _configure_mccs_subarray(self, cmd_name, cmd_data):
try:
mccs_subarray_ln_fqdn = ""
property_val = self.this_server.read_property("MccsSubarrayLNFQDN")
mccs_subarray_ln_fqdn = mccs_subarray_ln_fqdn.join(property_val)
mccs_subarray_ln_client = TangoClient(mccs_subarray_ln_fqdn)
mccs_subarray_ln_client.send_command(cmd_name, cmd_data)
log_msg = "%s configured succesfully." % mccs_subarray_ln_fqdn
self.logger.debug(log_msg)
except DevFailed as df:
log_message = df[0].desc
self.this_server.write_attr("activityMessage", log_message, False)
log_msg = "Failed to configure %s. %s" % (
mccs_subarray_ln_fqdn,
df,
)
self.logger.error(log_msg)
|
from arima.arima_model import fit_arma
from arima.process import generate_process, get_forecast
from arima.model_selection import choose_arma_model, choose_arima_model
from arima.stat_test import augmented_dickey_fuller_fit, ADFBootstrap, wald_stat
|
#! /usr/bin/env python2.5
#
# orange4.py -- an interpreter for a simple lisp-like language
# [under same license as Python]
# (inspired by Peter Norvig's JScheme)
#
# =================================================================
# + Notes +
# =================================================================
#
# - This is just a silly hack for a simple
# lisp-like interpreter in Python.
# - Python tuples are used for the language.
# To overcome the fact that freevars will cause
# Python to error out, invoke the Using class
# as the first argument of the form: and pass
# in a string of all freevars used in the
# second form.
# - The use of Python tuples makes things
# really, really UGLY!! Other than the
# commas everywhere, you have to be careful
# about singleton tuples: for example,
# for a LET, you have a single binding, say
# (a, 10); you can't say
# (let, ((a, 10)) ...)
# you have to:
# (let, ((a, 10),) ...)!!!
#
# =================================================================
# + Examples (see the function test for more) +
# =================================================================
#
# > oeval((using('a b'), (let, ((a, 10), (b, 20)), (pr, (plus, a, b)))))
# 30
#
# > oeval((using('a b c'), (oif, (let, ((a, 100), (b, 200)),
# (let, ((c, (gt, a, b)),),
# (oif, c, (pr, 'greater'), (pr, 'lower')),
# c)),
# 'hi',
# 'bye')))
# lower
# 'bye'
#
# [User-defined functions]
#
# > oeval((using('a x y z'), (let, ((a, (olambda, (x, y, z),
# (plus, (plus, x, y), z),)),), (a, 100, 200, 300))))
# 600
__version__ = "$Id: orange4.py,v 1.10 2006/12/09 19:47:42 sri Exp $"
trace = False
class oexc(Exception):
pass
def error(s):
raise oexc(s)
class var:
def __init__(self, name):
self.name = name
def __str__(self):
return "<lispvar %s>" % self.name
__repr__ = __str__
# I can't say something like
# oeval((let, ((a, 10), (b, 20)) ...)):
# Python will complain about a and b.
# So we do something like this:
# oeval((using('a b'), (let, ((a, 10), (b, 20)) ...))):
class using:
def __init__(self, string):
for name in string.split():
if name in globals():
print "Warning: `%s' already defined, ignoring it" % name
else:
globals()[name] = var(name)
# names that conflict with Python
# builtins start with an 'o'
primitives = """
oif pr plus minus
true nil olambda let gt
closure eq multiply
"""
for prim in primitives.split():
globals()[prim] = prim
globals()[prim.upper()] = prim
# =================================================================
# + Eval & Apply +
# =================================================================
def oeval(x, env=None):
if trace: print 'oeval', x, env
if isinstance(x, var):
return env_lookup(x, env)
elif not isinstance(x, tuple):
return x
if isinstance(x[0], using):
x = x[1]
fn = x[0]
args = x[1:]
if fn == OIF:
if oeval(args[0], env) == NIL:
return oeval(args[2], env)
else:
return oeval(args[1], env)
elif fn == OLAMBDA:
return (CLOSURE, args[0], args[1:], env)
elif fn == LET:
bindings = args[0]
newenv = dict((var.name, oeval(val, env))
for (var, val) in bindings)
return oevalis(args[1:], [newenv, env])
else: # function application
# global env is the python global env,
# so we don't need to evaluate fn (i think).
return oapply(oeval(fn, env),
tuple(oeval(arg, env) for arg in args))
# since we are applying fns, args are evalled
def oapply(fn, args):
def err(type):
error("%s %s, %s" % (
type,
(fn.name if isinstance(fn, var) else fn),
args))
if not isinstance(fn, tuple):
if fn == GT:
if args[0] > args[1]:
return TRUE
return NIL
elif fn == PR:
print args[0]
return NIL
elif fn == PLUS:
return args[0] + args[1]
elif fn == MINUS:
return args[0] - args[1]
elif fn == EQ:
# args should just be numbers
if args[0] == args[1]:
return TRUE
return NIL
elif fn == MULTIPLY:
return args[0] * args[1]
else:
err("unknown function")
# user-defined functions:
elif fn[0] == CLOSURE:
if trace: print 'calling closure', fn, args
formal_params, body, env_when_defined = fn[1:]
actual_params = args
if len(formal_params) != len(actual_params):
err("wrong number of args")
newenv = dict(zip((x.name for x in formal_params),
actual_params))
# lexical scoping
return oevalis(body, [newenv, env_when_defined])
# return oevalis(body, [newenv, env])
# the above specifies dynamic scoping (i think!)
# env, should be an env passed
# to this oapply fn.
else:
err("unknown function")
def oevalis(body, env):
result = nil
for x in body:
result = oeval(x, env)
return result
# envs are stacked like so: [{'a': 1}, [{'b': 2, 'c': 3}, {}]]
#
# env is either a non-True value, or
# a 2-element list:
# - the 1st element is a dict
# - the 2nd element is an env
def env_lookup(var, env):
if not env:
error("unbound variable %s: %s" % (var.name, env))
env, parents = env
if var.name in env:
return env[var.name]
return env_lookup(var, parents)
# =================================================================
# + Tests +
# =================================================================
def test():
def really_assert(expected, form):
try:
actual = oeval(form)
except oexc, ex:
print "lisperror while evaluating form: %s\n%s" % (
str(ex),
str(form))
else:
if expected != actual:
print "expected `%s' but got `%s':\n%s\n%s\n%s" % (
expected, actual,
"="*50,
str(form),
"="*50)
# =================
really_assert(30, (using('a b'), (let, ((a, 10), (b, 20)),
(pr, (plus, a, b)),
(plus, a, b))))
really_assert("bye", (using('a b c'),
(oif, (let, ((a, 100), (b, 200)),
(let, ((c, (gt, a, b)),),
(oif, c, (pr, 'greater'), (pr, 'lower')),
c)),
'hi',
'bye')))
really_assert(600, (using('a x y z'),
(let, ((a, (olambda, (x, y, z),
(plus, (plus, x, y), z),)),),
(a, 100, 200, 300))))
# the y combinator:
# translated from
# http://www.ece.uc.edu/~franco/C511/html/Scheme/ycomb.html
really_assert(3628800,
(using('ycomb x proc arg fact fnarg n'),
(let, ((ycomb, (olambda, (x,),
((olambda, (proc,),
(x, (olambda, (arg,), ((proc, proc), arg)))),
(olambda, (proc,),
(x, (olambda, (arg,), ((proc, proc), arg))))))),),
(let, ((fact, (olambda, (fnarg,),
(olambda, (n,),
(oif, (eq, n, 0),
1,
(multiply, n,
(fnarg, (minus, n, 1))))))),),
(pr, "hi"),
(pr, ((ycomb, fact), 10)),
((ycomb, fact), 10)))))
|
from mayan.apps.appearance.classes import Icon
icon_task_manager = Icon(driver_name='fontawesome', symbol='braille')
|
from django.apps import AppConfig
class SeedConfig(AppConfig):
name = 'seed'
|
from abc import ABCMeta, abstractmethod
from ml_keeker.common import RegexRepository
class Handler(metaclass=ABCMeta):
@abstractmethod
def handle(self):
pass
@abstractmethod
def close(self):
pass
class EventHandler(Handler):
def __init__(self, _filter: dict, logger=None):
self._regex_repo = RegexRepository()
self._filter = _filter
self.event_list = list(self._filter.keys())
self.logger = logger
def _parse(self, text) -> dict:
"""
Description:
Parse text based on format
Keyword Arguments:
text -- str, not parsed log text
Return:
parse_dict -- dict, parsed dictionary
Example of return:
{
'asctime': '2019-02-19 10:51:38,733',
'filename': 'entry_point.py',
'level': 'INFO',
'lineno': '153',
'message': "choosing ./shared/confAT0221-sixteen-jupiter-glucose-eight12.json among ['/shared/confAT0221-sixteen-jupiter-glucose-eight12.json']"
}
"""
log_format = self._regex_repo.log_format
parse_dict = self._regex_repo.string_to_dict(text, log_format)
return parse_dict
    def _classify(self, message) -> str:
""" Classify log event to some Category"""
matched_list = []
for event in self.event_list:
temp_list = []
patterns = self._filter.get(event).get('pattern')
            if not isinstance(patterns, list):
continue
for fmt in patterns:
sub_matched = self._regex_repo.check_matched(message.lower(), fmt.lower())
temp_list.append(sub_matched)
matched = any(temp_list)
matched_list.append((event, matched))
        # Handle the exceptional case of two or more matched event labels
cnt = 0
event_label = []
for event, matched in matched_list:
if matched:
event_label.append(event)
cnt += 1
if cnt == 1:
return event_label[0]
elif cnt > 1:
raise Exception("2 more matched event label. labels: %s message: %s" % (' '.join(event_label), message))
else:
self.logger.debug("No matched label. Check filter pattern. message: %s" % message)
return 'NOTFOUND'
    def handle(self, text) -> dict:
""" Return log event label based on filter rule"""
parse_dict = self._parse(text)
self.logger.debug(parse_dict)
message = parse_dict.get('message')
event_label = self._classify(message)
copy_dict = parse_dict.copy()
tmp_label = self._filter.get(event_label, False)
if tmp_label:
extract = tmp_label.get('extract', False)
else:
extract = False
if extract is None or not extract:
copy_dict['event_label'] = event_label
return copy_dict
else:
patterns = self._filter[event_label].get('pattern')
extracted_dict = None
for pattern in patterns:
if extracted_dict is not None:
break
extracted_dict = self._regex_repo.string_to_dict(message, pattern)
copy_dict.update(extracted_dict)
copy_dict['event_label'] = event_label
return copy_dict
def close(self):
pass
|
from unittest import mock
from django.test import TestCase
from two_factor import webauthn_utils
from two_factor.models import WebauthnDevice
from webauthn.webauthn import COSE_ALG_ES256, COSE_ALG_PS256, COSE_ALG_RS256
from .utils import UserMixin
class WebAuthnUtilsTest(UserMixin, TestCase):
def setUp(self):
super().setUp()
self.user = self.create_user()
def test_get_credentials(self):
user_webauthn_devices = [
WebauthnDevice(
user=self.user,
public_key=f'public-key-{pk}',
key_handle=f'key-handle-{pk}',
sign_count=0,
) for pk in range(3)
]
max_excluded_credentials = len(user_webauthn_devices) - 1
webauthn_device_objects=mock.Mock(
filter=mock.Mock(return_value=user_webauthn_devices),
)
with mock.patch(
'two_factor.webauthn_utils.WebauthnDevice.objects',
new_callable=mock.PropertyMock,
return_value=webauthn_device_objects,
), self.settings(MAX_EXCLUDED_CREDENTIALS=max_excluded_credentials):
credentials = webauthn_utils.get_credentials(self.user)
assert credentials == [
{'id': 'key-handle-0', 'type': 'public-key'},
{'id': 'key-handle-1', 'type': 'public-key'},
]
def test_make_credential_options(self):
encoded_urandom = 'a-b64-encoded-random-number'.encode('utf-8')
encoded_hashed_id = 'a-b64-encoded-hashed-id'.encode('utf-8')
credentials = 'a-list-of-credentials'
relying_party = {'id': 'rp-id', 'name': 'rp-name'}
with mock.patch(
'two_factor.webauthn_utils._webauthn_b64_encode',
side_effect=[encoded_urandom, encoded_hashed_id],
) as _webauthn_b64_encode, mock.patch(
'two_factor.webauthn_utils.get_credentials',
return_value=credentials,
) as get_credentials:
output = webauthn_utils.make_credential_options(
user=self.user, relying_party=relying_party)
assert _webauthn_b64_encode.call_count == 2
assert get_credentials.called
pub_key_cred_params = output.pop('pubKeyCredParams')
assert sorted(pub_key_cred_params, key=lambda x: x['alg']) == sorted([
{'alg': COSE_ALG_ES256, 'type': 'public-key'},
{'alg': COSE_ALG_RS256, 'type': 'public-key'},
{'alg': COSE_ALG_PS256, 'type': 'public-key'},
], key=lambda x: x['alg'])
assert output == {
'challenge': 'a-b64-encoded-random-number',
'rp': relying_party,
'user': {'id': 'a-b64-encoded-hashed-id', 'name': self.user.email, 'displayName': ''},
'timeout': 60000,
'excludeCredentials': credentials,
'attestation': 'direct',
'extensions': {'webauthn.loc': True},
'authenticatorSelection': {'userVerification': 'discouraged'},
}
|
"""
from gcn.etc.dbconfig import DBCONFIG
clinvardb clinvitae clnphesnpdb cosmic dbsnp esp exac geneontology hgmd kgdb mimdb mirna nsfpdb refgene refmrna regulomedb splicedb utrdb
"""
from collections import namedtuple
from gcn.lib.utils import lib_utils
def get_vkey(chrom,pos,ref,alt):
#assembly = 'b37'
return '%s_%s_%s_%s' % (chrom, pos, ref, alt)
class VariantKey:
def __init__(self, chrom, pos, ref, alt):
#self.assembly = 'b37'
self.chrom = chrom
self.pos = pos
self.ref = ref
self.alt = alt
self.rsids = []
self.genes = []
self.dbs = {}
self.initialize_db()
def initialize_db(self):
DBINDEX = [['EXAC', 'snps', 'idx', []],
['SNPDB', 'snps', 'idx', []],
['KGDB', 'snps', 'idx', []],
['ESP', 'snps', 'idx', []],
['CLINVARDB', 'snps', 'idx', []],
['CLINVITAE', 'snps', 'idx', []],
['REGULOME', 'regulome', 'idx', []],
['CLNPHESNP', 'clnsnp', 'idx', []],
['HGMDDB', 'snps', 'idx', []],
['COSMIC', 'snps', 'idx', []],
['NSFP', 'nsfp', 'idx', []],
['SPLICE', 'splice', 'idx', []],
['MIRNA', 'mirna', 'idx', []],
['OMIM', 'omim', 'idx', []]]
for tbFmt in DBINDEX:
self.dbs[tbFmt[0]] = tbFmt[-1]
class DbLink:
def __init__(self,out_fn):
self.vKeys = {}
self.tsv = out_fn
self.fpw = self.print_header(out_fn)
def add_key(self,chrom,pos,ref,alt,\
db_name=None,primary_idx=None,\
rsid = None):
vkey = get_vkey(chrom,pos,ref,alt)
if vkey not in self.vKeys:
self.vKeys[vkey] = VariantKey(chrom,pos,ref,alt)
if db_name and primary_idx:
self.vKeys[vkey].dbs[db_name]=primary_idx
        if rsid:
            # Accept either a single rsid or a list of rsids so that each
            # entry is stored individually (add_snpid does a membership test).
            if isinstance(rsid, list):
                self.vKeys[vkey].rsids.extend(rsid)
            else:
                self.vKeys[vkey].rsids.append(rsid)
def add_snpid(self,rsid,db_name,primary_index):
for vkey in self.vKeys.iterkeys():
if rsid in self.vKeys[vkey].rsids:
self.vKeys[vkey].dbs[db_name]=primary_index
break
def add_genes(self,gene):
for vkey in self.vKeys.iterkeys():
self.vKeys[vkey].genes.append(gene)
def add_primary_idx(self,db_name,primary_index):
for vkey in self.vKeys.iterkeys():
if isinstance(self.vKeys[vkey].dbs[db_name],list):
if isinstance(primary_index,list):
self.vKeys[vkey].dbs[db_name].extend(list(set(primary_index)))
else:
self.vKeys[vkey].dbs[db_name].append(primary_index)
else:
self.vKeys[vkey].dbs[db_name]=primary_index
def print_header(self,tsv):
DBINDEX = [['EXAC', 'snps', 'idx', []],
['SNPDB', 'snps', 'idx', []],
['KGDB', 'snps', 'idx', []],
['ESP', 'snps', 'idx', []],
['CLINVARDB', 'snps', 'idx', []],
['CLINVITAE', 'snps', 'idx', []],
['REGULOME', 'regulome', 'idx', []],
['CLNPHESNP', 'clnsnp', 'idx', []],
['HGMDDB', 'snps', 'idx', []],
['COSMIC', 'snps', 'idx', []],
['NSFP', 'nsfp', 'idx', []],
['SPLICE', 'splice', 'idx', []],
['MIRNA', 'mirna', 'idx', []],
['OMIM', 'omim', 'idx', []]]
heads = ['variant_index','chrom','pos','ref','alt','rsid']
fpw = open(tsv,'w')
for dbindex in DBINDEX:
heads.append('%s'%('.'.join(dbindex[:-1])))
fpw.write('#%s\n'%(lib_utils.joined(heads,'\t')))
return fpw
def print_vkey(self,snpcnt):
DBINDEX = ['EXAC','SNPDB','KGDB','ESP','CLINVARDB','CLINVITAE',\
'REGULOME','CLNPHESNP','HGMDDB',\
'COSMIC','NSFP','SPLICE','MIRNA','OMIM']
if self.vKeys:
for cVkey in self.vKeys.itervalues():
cols = [snpcnt,cVkey.chrom,cVkey.pos,cVkey.ref,cVkey.alt,lib_utils.joined(cVkey.rsids,',')]
for tb in DBINDEX:
tbIdx = cVkey.dbs[tb]
if isinstance(tbIdx,list):
tbIdx = lib_utils.joined(list(set(tbIdx)), ',')
if not tbIdx:
tbIdx = 'NULL'
cols.append(tbIdx)
else:
if not tbIdx:
tbIdx = 'NULL'
cols.append(tbIdx)
self.fpw.write('%s\n'%(lib_utils.joined(cols,'\t')))
del cols
self.cleanup()
def cleanup(self):
DBINDEX = ['EXAC','SNPDB','KGDB','ESP','CLINVARDB','CLINVITAE',\
'REGULOME','CLNPHESNP','HGMDDB',\
'COSMIC','NSFP','SPLICE','MIRNA','OMIM']
if self.vKeys:
for cVkey in self.vKeys.itervalues():
for tb in DBINDEX:
del cVkey.dbs[tb]
cVkey.dbs.clear()
del cVkey.rsids
del cVkey.genes
cVkey.rsids = []
cVkey.genes = []
self.vKeys.clear()
self.vKeys = {}
if __name__ == '__main__':
#initialize
outfile = '/tmp/test'
outfile_dblink = outfile + '.tsv'
d = 'creating dblink tsv file [%s].' % outfile_dblink
print d
cDbLink = DbLink(outfile_dblink)
chrom,pos,ref,alt = ['chr1', '1234', 'A', 'C']
cDbLink.add_key(chrom, pos, ref, alt)
cDbLink.add_key(chrom, pos, ref, alt, 'EXAC', '4')
chrom, pos, ref, alt = ['chr1', '1234', 'A', 'G']
    cDbLink.add_key(chrom, pos, ref, alt, 'SNPDB', '4', rsid=['rs1234','rs3456'])
cDbLink.add_snpid('rs1234', 'REGULOME', '6')
cDbLink.add_snpid('rs3456', 'CLNPHESNP', '9')
cDbLink.add_primary_idx('SPLICE', '3')
cDbLink.add_genes('CA3AR1')
cDbLink.print_vkey(1)
cDbLink.fpw.close()
d = 'Done [%s].' % outfile_dblink
print d
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017 - for information on the respective copyright owner
# see the NOTICE file and/or the repository https://github.com/boschresearch/statestream
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import keras
from keras import backend as K
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Model
from keras.layers import Activation, \
Add, \
Concatenate, \
Conv2D, \
Dense, \
Dropout, \
Flatten, \
GaussianNoise, \
Input, \
Lambda, \
MaxPooling2D, \
UpSampling2D
from keras.datasets.cifar import load_batch
from keras.datasets import mnist
from keras.engine import Layer
import os
import sys
import numpy as np
import pickle
import copy
from ruamel_yaml import YAML
from time import strftime, gmtime
import scipy
from scipy import ndimage
import keras_model_builder
from keras_model_builder import StGraph_2_keras
from stdatasets import Stdataset
def print_help():
"""Function to print help instructions to konsole.
"""
print("\nTo train a model, the model name and mode has to be specified:")
print(" python keras_model_trainer.py model_specifications/<model name>.st_graph <mode>")
print(" Available models: \n")
for s in available_specs:
print(" model_specifications/" + s)
print(" Modes: streaming, sequential")
print("")
# Get available specification files.
available_specs = ["model_specifications" + os.sep + a for a in os.listdir("./model_specifications/")]
# Check input arguments.
if len(sys.argv) != 3:
print_help()
sys.exit()
if sys.argv[1] not in available_specs:
print_help()
sys.exit()
if sys.argv[2] not in ["streaming", "sequential"]:
print_help()
sys.exit()
# Get model name from parameters.
model_name = sys.argv[1].split(".")[0].split("/")[1]
# Flag for full (computational expensive) model evaluation during training.
full_eval = True
# Get meta data (training epochs, input noise, rollout window size).
if model_name.startswith("mnist"):
epochs = 100
noise_std = 2.0
rollout_window = 8
repetitions = 6
elif model_name.startswith("cifar"):
epochs = 100
noise_std = 1.0
rollout_window = 8
repetitions = 1
elif model_name.startswith("gtsrb"):
epochs = 10 # 100
noise_std = 0.5
rollout_window = 8
repetitions = 4 # 12
else:
epochs = 100
noise_std = 1.0
rollout_window = 8
repetitions = 1
# Load model specification to dictionary stg.
dataset = None
try:
yaml = YAML()
stg = yaml.load(open(sys.argv[1], "r"))
dataset = stg["interfaces"]["data"]["type"]
except:
print("\nError: Unable to load specification from " + str(sys.argv[1]))
sys.exit()
# Load and prepare dataset.
DS = Stdataset(dataset)
# Repeat everything.
for rep in range(repetitions):
# Generate keras model from specification.
KM = StGraph_2_keras(stg, sys.argv[2], rollout_window, noise_std)
# Shuffle dataset for every repetition.
DS.shuffle()
# Generate rolled out datasets for cifar10 and mnist.
# For gtsrb we will sample tracks epoch-wise.
DATAx = {}
DATAy = {}
if dataset in ["cifar10", "mnist"]:
for d in ["train", "valid", "test"]:
DATAx[d] = np.concatenate([DS.DATAX[d] for r in range(KM.rollouts + 1)], axis=3)
DATAy[d] = np.concatenate([DS.DATAY[d] for r in range(len(KM.M["outputs"]))], axis=1)
elif dataset == "gtsrb":
for d in ["valid", "test"]:
DATAx[d] = np.zeros([DS.DATAX[d].shape[0],
DS.DATAX[d].shape[1],
DS.DATAX[d].shape[2],
3 * (KM.rollouts + 1)])
rnd_startframe = np.random.randint(low=0, high=30 - KM.rollouts - 2, size=[DS.DATAX[d].shape[0],])
for i in range(DS.DATAX[d].shape[0]):
DATAx[d][i,:,:,:] = DS.DATAX[d][i,:,:,3 * rnd_startframe[i]:3 * (rnd_startframe[i] + KM.rollouts + 1)]
# Set ground truth tensor.
DATAy[d] = np.concatenate([DS.DATAY[d] for r in range(len(KM.M["outputs"]))], axis=1)
# Dictionary to store results during training.
results = {}
# Dictionary to store epoch-wise accuracies.
for d in ["train", "valid", "test"]:
results[d + "_acc"] = []
# Train and evaluate model for the specified number of epochs.
for e in range(epochs):
# For the GTSRB dataset we use tracks not repeated samples.
# As "temporal augmentation" we sample a different starting
# frame for every track.
if dataset == "gtsrb" and e == 0:
# For training we sample from the entire track.
DATAx["train"] = np.zeros([DS.DATAX["train"].shape[0],
DS.DATAX["train"].shape[1],
DS.DATAX["train"].shape[2],
3 * (KM.rollouts + 1)])
rnd_startframe = np.random.randint(low=0, high=30 - KM.rollouts - 2, size=[DS.DATAX["train"].shape[0],])
for i in range(DATAx["train"].shape[0]):
DATAx["train"][i,:,:,:] = DS.DATAX["train"][i,:,:,rnd_startframe[i] * 3:(rnd_startframe[i] + KM.rollouts + 1) * 3]
# Set ground truth tensor.
DATAy["train"] = np.concatenate([DS.DATAY["train"] for r in range(len(KM.M["outputs"]))], axis=1)
# Train the rollout window for one epoch.
print("Training epoch " + str(e + 1) + " / " + str(epochs))
KM.train_epoch(DATAx, DATAy)
print("TRAIN SHAPES: " + str(DATAy["train"].shape) + " " + str(DS.DATAY["train"].shape))
# Evaluate model.
accuracy = {}
for a in ["train", "valid", "test"]:
if (full_eval and a in ["train", "valid"]) or (a == "test" and e == epochs - 1):
# Compute logits on dataset.
current_logits = []
# ["logits"][epoch][batch][rollouts][batchsize, 10]
for b in range(DATAx[a].shape[0] // KM.batchsize):
batch = DATAx[a][b * KM.batchsize:(b + 1) * KM.batchsize]
outs = KM.M["output function"]([batch] + [0])
current_logits.append(outs)
# Compute test accuracy of current epoch.
# For every batch, stack rollouts.
current_batch = []
for b in range(len(current_logits)):
current_batch.append(np.stack(current_logits[b], axis=-1))
current_epoch = np.concatenate(current_batch)
current_classes = np.argmax(current_epoch, axis=1)
# Compute accuracies and store results.
accuracy[a] = np.zeros([current_classes.shape[1],])
for s in range(current_epoch.shape[0]):
for r in range(current_classes.shape[1]):
if np.argmax(DS.DATAY[a][s,:]) == current_classes[s][r]:
accuracy[a][r] += 1.
accuracy[a] /= current_epoch.shape[0]
results[a + "_acc"].append(accuracy[a])
# Print some evaluation information.
print(" epoch " + str(e) + "/" + str(epochs) + " " + a + " acc.: " + str((100 * accuracy[a]).astype(np.int32)))
# Save trained model.
results_file = "model_trained/" + model_name + "-" + sys.argv[2] + "-" + str(rep)
KM.M["model"].save_weights(results_file + ".h5")
M_json = KM.M["model"].to_json()
with open(results_file + ".json", "w") as model_file:
model_file.write(M_json)
pickle.dump(results, open(results_file + ".results", "wb"))
|
"""Helper functions to read from library."""
import os
from dotenv import dotenv_values
def load_dotenv_config(dotenv_path=None, verbose=False, **kwargs):
"""Load config file at either `dotenv_path`, env var `PF_CONFIG_DOTENV_PATH`."""
return dotenv_values(dotenv_path=dotenv_path, verbose=verbose, **kwargs)
def get_envvar(prefix="PF_"):
"""get all variables that start with the given prefix."""
prefix_len = len(prefix)
return {
k[prefix_len:].lower(): v.strip()
for k, v in os.environ.items()
if k.startswith(prefix)
}
def load_config(prefix="PF_"):
"""Load configuration from environmental file and environmental variables."""
envvar_settings = get_envvar(prefix=prefix)
verbose = envvar_settings.get("verbose", False)
dotenv_path = envvar_settings.get("config_dotenv_path", None)
settings = load_dotenv_config(dotenv_path=dotenv_path, verbose=verbose)
settings.update(envvar_settings)
return settings
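
# A usage sketch (hedged): with, say, PF_DEBUG=1 exported and an optional
# PF_CONFIG_DOTENV_PATH pointing at a .env file, dotenv values are loaded
# first and then overridden by the PF_-prefixed environment variables.
if __name__ == "__main__":
    print(load_config(prefix="PF_"))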
|
import keras.layers
from .utils import ensure_tf_type
def convert_relu(node, params, layers, node_name, keras_name):
"""
Convert ReLU activation layer
:param node: current operation node
:param params: operation attributes
:param layers: available keras layers
:param node_name: internal converter name
:param keras_name: resulting layer name
:return: None
"""
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for an activation layer.')
input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
relu = keras.layers.Activation('relu', name=keras_name)
layers[node_name] = relu(input_0)
def convert_lrelu(node, params, layers, node_name, keras_name):
"""
Convert LeakyReLU activation layer
:param node: current operation node
:param params: operation attributes
:param layers: available keras layers
:param node_name: internal converter name
:param keras_name: resulting layer name
:return: None
"""
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for an activation layer.')
input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
leakyrelu = \
keras.layers.LeakyReLU(alpha=params['alpha'], name=keras_name)
layers[node_name] = leakyrelu(input_0)
def convert_sigmoid(node, params, layers, node_name, keras_name):
"""
Convert Sigmoid activation layer
:param node: current operation node
:param params: operation attributes
:param layers: available keras layers
:param node_name: internal converter name
:param keras_name: resulting layer name
:return: None
"""
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for an activation layer.')
input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
sigmoid = keras.layers.Activation('sigmoid', name=keras_name)
layers[node_name] = sigmoid(input_0)
def convert_tanh(node, params, layers, node_name, keras_name):
"""
Convert Tanh activation layer
:param node: current operation node
:param params: operation attributes
:param layers: available keras layers
:param node_name: internal converter name
:param keras_name: resulting layer name
:return: None
"""
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for an activation layer.')
input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
tanh = keras.layers.Activation('tanh', name=keras_name)
layers[node_name] = tanh(input_0)
def convert_selu(node, params, layers, node_name, keras_name):
"""
Convert SELU activation layer
:param node: current operation node
:param params: operation attributes
:param layers: available keras layers
:param node_name: internal converter name
:param keras_name: resulting layer name
:return: None
"""
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for an activation layer.')
input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
selu = keras.layers.Activation('selu', name=keras_name)
layers[node_name] = selu(input_0)
def convert_softmax(node, params, layers, node_name, keras_name):
"""
Convert softmax activation layer
:param node: current operation node
:param params: operation attributes
:param layers: available keras layers
:param node_name: internal converter name
:param keras_name: resulting layer name
:return: None
"""
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for an activation layer.')
input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
softmax = keras.layers.Activation('softmax', name=keras_name)
layers[node_name] = softmax(input_0)
|
from Attack.ParameterTypes.String import String
class SpecificString(String):
def __init__(self, args: list):
super(SpecificString, self).__init__(*args)
self.name = "String"
    def validate(self, value) -> (bool, str):
        # String.validate is assumed to return a (bool, value) pair, matching
        # this method's own signature; unpack it so the truthiness check below
        # does not operate on the tuple itself.
        is_valid, value = String.validate(self, value)
        args = []
        if is_valid and self.args:
            for elem in self.args:
                if not isinstance(elem, str):
                    break
                args.append(elem)
            is_valid = value in args
        return is_valid, value
|
import pytest
from pynextion.events import (
Event,
MsgEvent,
TouchEvent,
CurrentPageIDHeadEvent,
PositionHeadEvent,
SleepPositionHeadEvent,
StringHeadEvent,
NumberHeadEvent,
CommandSucceeded,
EmptyMessage,
EventLaunched
)
from pynextion.exceptions import NexMessageException
from pynextion.constants import Return
from pynextion.int_tools import limits
def test_event_touch_constants():
assert Event.Touch.Press.value == 0x01
def test_event_cmd_success():
msg = [0x01, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, CommandSucceeded)
assert evt.issuccess()
def test_event_cmd_error_invalid_page_id():
msg = [0x03, 0xff, 0xff, 0xff]
with pytest.raises(NexMessageException):
MsgEvent.parse(msg)
def test_event_empty_message():
msg = []
evt = MsgEvent.parse(msg)
assert isinstance(evt, EmptyMessage)
assert evt.isempty()
def test_event_launched():
msg = [0x88, 0xff, 0xff, 0xff]
# System successful start up
evt = MsgEvent.parse(msg)
assert isinstance(evt, EventLaunched)
# def test_event_undef():
# msg = [0x00, 0x00, 0x00, 0xff, 0xff, 0xff]
# evt = MsgEvent.parse(msg)
# assert isinstance(evt, EventLaunched)
def test_event_touchevent():
msg = [0x65, 0x00, 0x02, 0x01, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, TouchEvent)
assert evt.code == Return.Code.EVENT_TOUCH_HEAD
assert evt.pid == 0x00
assert evt.cid == 0x02
assert evt.tevts == Event.Touch.Press # touch event state
def test_CurrentPageIDHeadEvent():
msg = [0x66, 0x02, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, CurrentPageIDHeadEvent)
assert evt.code == Return.Code.CURRENT_PAGE_ID_HEAD
assert evt.pid == 0x02
def test_PositionHeadEvent():
msg = [0x67, 0x00, 0x7a, 0x00, 0x1e, 0x01, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, PositionHeadEvent)
assert evt.code == Return.Code.EVENT_POSITION_HEAD
assert evt.x == 122
assert evt.y == 30
assert evt.tevts == Event.Touch.Press
def test_SleepPositionHeadEvent():
msg = [0x68, 0x00, 0x7a, 0x00, 0x1e, 0x01, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, SleepPositionHeadEvent)
assert evt.code == Return.Code.EVENT_SLEEP_POSITION_HEAD
assert evt.x == 122
assert evt.y == 30
assert evt.tevts == Event.Touch.Press
def test_StringHeadEvent():
msg = [0x70, 0x61, 0x62, 0x63, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, StringHeadEvent)
assert evt.code == Return.Code.STRING_HEAD
assert evt.value == "abc"
def test_NumberHeadEvent():
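    # Nextion returns 32-bit numbers little-endian: the four bytes following
    # the 0x71 head are least-significant first.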
msg = [0x71, 0x66, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
assert evt.code == Return.Code.NUMBER_HEAD
assert evt.value == 102
msg = [0x71, 0x66, 0x01, 0x00, 0x00, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
assert evt.value == 0x00000166 # 102 + 256
msg = [0x71, 0x01, 0xff, 0x00, 0x00, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
assert evt.value == 0x0000ff01 # 65281
msg = [0x71, 0x01, 0x00, 0xff, 0x00, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
assert evt.value == 0x00ff0001 # 16711681
msg = [0x71, 0x01, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
assert evt.value == 0xff000001
msg = [0x71, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
min_val, max_val = limits(True, 32) # limits of int32 (signed int 32 bits)
assert evt.value == max_val
msg = [0x71, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert isinstance(evt, NumberHeadEvent)
assert evt.value == 0xfffffffe
assert evt.signed_value == -2
msg = [0x71, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff]
evt = MsgEvent.parse(msg)
assert evt.value == 0x80000000
min_val, max_val = limits(True, 32) # limits of int32 (signed int 32 bits)
assert evt.signed_value == min_val
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from downward import suites
from lab.reports import Attribute, gm
from common_setup import IssueConfig, IssueExperiment
SUITE_MCO14 = [
'barman-mco14-strips',
'cavediving-mco14-adl',
'childsnack-mco14-strips',
'citycar-mco14-adl',
'floortile-mco14-strips',
'ged-mco14-strips',
'hiking-mco14-strips',
'maintenance-mco14-adl',
'openstacks-mco14-strips',
'parking-mco14-strips',
'tetris-mco14-strips',
'thoughtful-mco14-strips',
'transport-mco14-strips',
'visitall-mco14-strips',
]
def main(revisions=None):
suite = SUITE_MCO14
configs = [
IssueConfig("astar_goalcount", [
"--search",
"astar(goalcount)"]),
IssueConfig("eager_greedy_ff", [
"--heuristic",
"h=ff()",
"--search",
"eager_greedy(h, preferred=h)"]),
IssueConfig("eager_greedy_add", [
"--heuristic",
"h=add()",
"--search",
"eager_greedy(h, preferred=h)"]),
IssueConfig("eager_greedy_cg", [
"--heuristic",
"h=cg()",
"--search",
"eager_greedy(h, preferred=h)"]),
IssueConfig("eager_greedy_cea", [
"--heuristic",
"h=cea()",
"--search",
"eager_greedy(h, preferred=h)"]),
IssueConfig("lazy_greedy_ff", [
"--heuristic",
"h=ff()",
"--search",
"lazy_greedy(h, preferred=h)"]),
IssueConfig("lazy_greedy_add", [
"--heuristic",
"h=add()",
"--search",
"lazy_greedy(h, preferred=h)"]),
IssueConfig("lazy_greedy_cg", [
"--heuristic",
"h=cg()",
"--search",
"lazy_greedy(h, preferred=h)"]),
IssueConfig("seq_sat_lama_2011", [], driver_options=[
"--alias", "seq-sat-lama-2011"]),
IssueConfig("seq_sat_fdss_1", [], driver_options=[
"--alias", "seq-sat-fdss-1"]),
IssueConfig("seq_sat_fdss_2", [], driver_options=[
"--alias", "seq-sat-fdss-2"]),
]
exp = IssueExperiment(
revisions=revisions,
configs=configs,
suite=suite,
test_suite=[
#'cavediving-sat14-adl:testing01_easy.pddl',
#'childsnack-sat14-strips:child-snack_pfile05.pddl',
#'citycar-sat14-adl:p3-2-2-0-1.pddl',
#'ged-sat14-strips:d-3-6.pddl',
'hiking-sat14-strips:ptesting-1-2-7.pddl',
#'maintenance-sat14-adl:maintenance-1-3-060-180-5-000.pddl',
#'tetris-sat14-strips:p020.pddl',
#'thoughtful-sat14-strips:bootstrap-typed-01.pddl',
#'transport-sat14-strips:p01.pddl',
],
processes=4,
email='silvan.sievers@unibas.ch',
)
exp.add_absolute_report_step()
exp()
main(revisions=['issue602-v1'])
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""API that build and execute recipes wrapped into a task dependency graph.
A Task consists of a 'recipe' (a closure to be executed) and a list of refs to
tasks that should be executed prior to executing this Task (i.e. dependencies).
The responsibility of the recipe of a task is to produce the file with the name
assigned at task creation.
A scenario is a ordered list of tasks to execute such that the dependencies of a
given task are execute before the said task. The scenario is built from a list
of final tasks and a list of frozen tasks:
- A final task is a task to execute ultimately. Therefore the scenario is
composed of final tasks and their required intermediary tasks.
- A frozen task is task to not execute. This is a mechanism to morph a task
that may have dependencies to a task with no dependency at scenario
generation time, injecting what the task have already produced before as an
input of the smaller tasks dependency graph covered by the scenario.
Example:
# -------------------------------------------------- Build my dependency graph
builder = Builder('my/output/dir')
@builder.RegisterTask('out0')
def BuildOut0():
Produce(out=BuildOut0.path)
@builder.RegisterTask('out1')
def BuildOut1():
Produce(out=BuildOut1.path)
@builder.RegisterTask('out2', dependencies=[BuildOut0, BuildOut1])
def BuildOut2():
DoStuff(BuildOut0.path, BuildOut1.path, out=BuildOut2.path)
@builder.RegisterTask('out3', dependencies=[BuildOut0])
def BuildOut3():
DoStuff(BuildOut0.path, out=BuildOut3.path)
# ---------------------------- Case 1: Execute BuildOut3 and its dependencies.
  for task in GenerateScenario(final_tasks=[BuildOut3], frozen_tasks=[]):
    task.Execute()
  # ---------- Case 2: Execute BuildOut2 and its dependencies but not BuildOut1.
  # It is required that BuildOut1.path already exists.
  for task in GenerateScenario(final_tasks=[BuildOut2],
                               frozen_tasks=[BuildOut1]):
    task.Execute()
"""
import argparse
import collections
import datetime
import errno
import logging
import os
import re
import subprocess
import sys
import common_util
_TASK_GRAPH_DOTFILE_NAME = 'tasks_graph.dot'
_TASK_GRAPH_PNG_NAME = 'tasks_graph.png'
_TASK_RESUME_ARGUMENTS_FILE = 'resume.txt'
_TASK_EXECUTION_LOG_NAME_FORMAT = 'task-execution-%Y-%m-%d-%H-%M-%S.log'
FROMFILE_PREFIX_CHARS = '@'
class TaskError(Exception):
pass
class Task(object):
"""Task with a recipe."""
def __init__(self, name, path, dependencies, recipe):
"""Constructor.
Args:
name: The name of the task.
path: Path to the file or directory that this task produces.
      dependencies: List of parent Tasks to execute first.
recipe: Function to execute.
"""
self.name = name
self.path = path
self._dependencies = dependencies
self._recipe = recipe
    self._is_done = recipe is None
def Execute(self):
"""Executes this task."""
if not self._is_done:
self._recipe()
self._is_done = True
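# Note: a recipe runs at most once per Task instance; `_is_done` is set after
# the first Execute() call, so later calls are no-ops. A minimal sketch of this
# behavior (hypothetical example task; not part of the original API):
def _ExecuteOnceExample():
  calls = []
  task = Task('demo', '/tmp/demo', [], lambda: calls.append(1))
  task.Execute()  # Runs the recipe.
  task.Execute()  # No-op: the recipe has already run.
  return len(calls)  # == 1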
class Builder(object):
"""Utilities for creating sub-graphs of tasks with dependencies."""
def __init__(self, output_directory, output_subdirectory):
"""Constructor.
Args:
output_directory: Output directory where the tasks work.
output_subdirectory: Subdirectory to put all created tasks in or None.
"""
self.output_directory = output_directory
self.output_subdirectory = output_subdirectory
self._tasks = {}
  # Caution:
  # This decorator may not create a task in the case where merge=True and
  # another task with the same name has already been created. In this case, it
  # will just reuse the former task. It is the user's responsibility to ensure
  # that merged tasks do the exact same thing.
  #
  # @builder.RegisterTask('hello')
  # def TaskA():
  #   my_object.a = 1
  #
  # @builder.RegisterTask('hello', merge=True)
  # def TaskB():
  #   # This function won't ever be executed.
  #   my_object.a = 2 # <------- Wrong because different from what TaskA does.
  #
  # assert TaskA == TaskB
  # TaskB.Execute() # Sets my_object.a to 1.
def RegisterTask(self, task_name, dependencies=None, merge=False):
"""Decorator that wraps a function into a task.
Args:
task_name: The name of this new task to register.
      dependencies: List of parent Tasks to execute before this task.
      merge: If a task already has this name, don't create a new one and
        reuse the existing one.
    Returns:
      A Task that was created by wrapping the function, or an existing
      registered Task (that may have wrapped a different function).
"""
rebased_task_name = self._RebaseTaskName(task_name)
dependencies = dependencies or []
def InnerAddTaskWithNewPath(recipe):
if rebased_task_name in self._tasks:
if not merge:
raise TaskError('Task {} already exists.'.format(rebased_task_name))
task = self._tasks[rebased_task_name]
return task
task_path = self.RebaseOutputPath(task_name)
task = Task(rebased_task_name, task_path, dependencies, recipe)
self._tasks[rebased_task_name] = task
return task
return InnerAddTaskWithNewPath
def RebaseOutputPath(self, builder_relative_path):
"""Rebases buider relative path."""
return os.path.join(
self.output_directory, self._RebaseTaskName(builder_relative_path))
def _RebaseTaskName(self, task_name):
if self.output_subdirectory:
return os.path.join(self.output_subdirectory, task_name)
return task_name
def GenerateScenario(final_tasks, frozen_tasks):
"""Generates a list of tasks to execute in order of dependencies-first.
Args:
final_tasks: The final tasks to generate the scenario from.
frozen_tasks: Sets of task to freeze.
Returns:
[Task]
"""
scenario = []
task_paths = {}
def InternalAppendTarget(task):
if task in frozen_tasks:
if not os.path.exists(task.path):
raise TaskError('Frozen target `{}`\'s path doesn\'t exist.'.format(
task.name))
return
if task.path in task_paths:
      if task_paths[task.path] is None:
raise TaskError('Target `{}` depends on itself.'.format(task.name))
if task_paths[task.path] != task:
raise TaskError(
'Tasks `{}` and `{}` produce the same file: `{}`.'.format(
task.name, task_paths[task.path].name, task.path))
return
task_paths[task.path] = None
for dependency in task._dependencies:
InternalAppendTarget(dependency)
task_paths[task.path] = task
scenario.append(task)
for final_task in final_tasks:
InternalAppendTarget(final_task)
return scenario
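# A quick, self-contained illustration of the dependencies-first ordering
# (hypothetical tasks whose recipes do nothing):
def _ScenarioOrderingExample():
  a = Task('a', '/tmp/a', [], lambda: None)
  b = Task('b', '/tmp/b', [a], lambda: None)
  c = Task('c', '/tmp/c', [a, b], lambda: None)
  # Each task is appended only after all of its dependencies.
  return [t.name for t in GenerateScenario([c], frozen_tasks=set())]
# _ScenarioOrderingExample() == ['a', 'b', 'c']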
def GenerateDependentSetPerTask(scenario):
"""Maps direct dependents per tasks of scenario.
Args:
scenario: The scenario containing the Tasks to map.
Returns:
{Task: set(Task)}
"""
task_set = set(scenario)
task_children = collections.defaultdict(set)
for task in scenario:
for parent in task._dependencies:
if parent in task_set:
task_children[parent].add(task)
return task_children
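# Sketch: with two tasks depending on the same parent, the parent maps to the
# set of its direct dependents (hypothetical tasks for illustration):
def _DependentSetExample():
  a = Task('a', '/tmp/a', [], lambda: None)
  b = Task('b', '/tmp/b', [a], lambda: None)
  c = Task('c', '/tmp/c', [a], lambda: None)
  return GenerateDependentSetPerTask([a, b, c])  # == {a: set([b, c])}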
def ListResumingTasksToFreeze(scenario, final_tasks, skipped_tasks):
"""Lists the tasks that one needs to freeze to be able to resume the scenario
after failure.
Args:
scenario: The scenario (list of Task) to be resumed.
final_tasks: The list of final Task used to generate the scenario.
skipped_tasks: Set of Tasks in the scenario that were skipped.
Returns:
[Task]
"""
scenario_tasks = set(scenario)
assert skipped_tasks.issubset(scenario_tasks)
frozen_tasks = []
frozen_task_set = set()
walked_tasks = set()
def InternalWalk(task):
if task in walked_tasks:
return
walked_tasks.add(task)
if task not in scenario_tasks or task not in skipped_tasks:
if task not in frozen_task_set:
frozen_task_set.add(task)
frozen_tasks.append(task)
else:
for dependency in task._dependencies:
InternalWalk(dependency)
for final_task in final_tasks:
InternalWalk(final_task)
return frozen_tasks
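# Sketch of the resuming contract: if `b` depends on `a`, `a` succeeded and `b`
# was skipped after a failure, then resuming should freeze `a` and re-run `b`
# (hypothetical tasks for illustration):
def _ResumeFreezeExample():
  a = Task('a', '/tmp/a', [], lambda: None)
  b = Task('b', '/tmp/b', [a], lambda: None)
  return ListResumingTasksToFreeze(
      scenario=[a, b], final_tasks=[b], skipped_tasks=set([b]))  # == [a]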
def OutputGraphViz(scenario, final_tasks, output):
"""Outputs the build dependency graph covered by this scenario.
Args:
scenario: The generated scenario.
final_tasks: The final tasks used to generate the scenario.
output: A file-like output stream to receive the dot file.
Graph interpretations:
  - Final tasks (the ones that were directly requested) are box shaped.
  - Non-final tasks are ellipse shaped.
  - Frozen tasks are colored blue.
"""
task_execution_ids = {t: i for i, t in enumerate(scenario)}
tasks_node_ids = dict()
def GetTaskNodeId(task):
if task in tasks_node_ids:
return tasks_node_ids[task]
node_id = len(tasks_node_ids)
node_label = task.name
node_color = 'blue'
node_shape = 'ellipse'
if task in task_execution_ids:
node_color = 'black'
node_label = str(task_execution_ids[task]) + ': ' + node_label
if task in final_tasks:
node_shape = 'box'
output.write(' n{} [label="{}", color={}, shape={}];\n'.format(
node_id, node_label, node_color, node_shape))
tasks_node_ids[task] = node_id
return node_id
output.write('digraph graphname {\n')
for task in scenario:
task_node_id = GetTaskNodeId(task)
for dep in task._dependencies:
dep_node_id = GetTaskNodeId(dep)
output.write(' n{} -> n{};\n'.format(dep_node_id, task_node_id))
output.write('}\n')
def CommandLineParser():
"""Creates command line arguments parser meant to be used as a parent parser
for any entry point that use the ExecuteWithCommandLine() function.
The root parser must be created with:
fromfile_prefix_chars=FROMFILE_PREFIX_CHARS.
Returns:
The command line arguments parser.
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-d', '--dry-run', action='store_true',
help='Only prints the tasks to build.')
parser.add_argument('-e', '--to-execute', metavar='REGEX', type=str,
action='append', dest='run_regexes', default=[],
help='Regex selecting tasks to execute.')
parser.add_argument('-f', '--to-freeze', metavar='REGEX', type=str,
action='append', dest='frozen_regexes', default=[],
help='Regex selecting tasks to not execute.')
parser.add_argument('-k', '--keep-going', action='store_true', default=False,
help='Keep going when some targets can\'t be made.')
parser.add_argument('-o', '--output', type=str, required=True,
help='Path of the output directory.')
parser.add_argument('-v', '--output-graphviz', action='store_true',
help='Outputs the {} and {} file in the output directory.'
''.format(_TASK_GRAPH_DOTFILE_NAME, _TASK_GRAPH_PNG_NAME))
return parser
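# A minimal sketch of how an entry point would combine this parent parser with
# its own root parser (illustrative only; the option value is hypothetical):
def _RootParserExample():
  root_parser = argparse.ArgumentParser(
      parents=[CommandLineParser()],
      fromfile_prefix_chars=FROMFILE_PREFIX_CHARS)
  # `@file` arguments are expanded from `file`, enabling the resume mechanism.
  return root_parser.parse_args(['-o', 'out']).output  # == 'out'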
def _SelectTasksFromCommandLineRegexes(args, default_final_tasks):
frozen_regexes = [common_util.VerboseCompileRegexOrAbort(e)
for e in args.frozen_regexes]
run_regexes = [common_util.VerboseCompileRegexOrAbort(e)
for e in args.run_regexes]
# Lists final tasks.
final_tasks = default_final_tasks
if run_regexes:
final_tasks = []
# Traverse the graph in the normal execution order starting from
# |default_final_tasks| in case of command line regex selection.
tasks = GenerateScenario(default_final_tasks, frozen_tasks=set())
    # The order of the run regexes takes precedence over the traversal order.
for regex in run_regexes:
for task in tasks:
if regex.search(task.name):
final_tasks.append(task)
  # Lists the tasks to freeze among the dependencies of |final_tasks|.
frozen_tasks = set()
impossible_tasks = set()
if frozen_regexes:
complete_scenario = GenerateScenario(final_tasks, frozen_tasks=set())
dependents_per_task = GenerateDependentSetPerTask(complete_scenario)
def MarkTaskAsImpossible(task):
if task in impossible_tasks:
return
impossible_tasks.add(task)
for dependent in dependents_per_task[task]:
MarkTaskAsImpossible(dependent)
for task in complete_scenario:
for regex in frozen_regexes:
if regex.search(task.name):
if os.path.exists(task.path):
frozen_tasks.add(task)
else:
MarkTaskAsImpossible(task)
break
return [t for t in final_tasks if t not in impossible_tasks], frozen_tasks
class _ResumingFileBuilder(object):
def __init__(self, args):
resume_path = os.path.join(args.output, _TASK_RESUME_ARGUMENTS_FILE)
self._resume_output = open(resume_path, 'w')
    # List the initial freezing regexes so as not to lose track of the final
    # targets to freeze in case of several resume attempts caused by a sudden
    # death.
for regex in args.frozen_regexes:
self._resume_output.write('-f\n{}\n'.format(regex))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
del exc_type, exc_value, exc_traceback # unused
self._resume_output.close()
def OnTaskSuccess(self, task):
    # Log the succeeded task so that it is guaranteed to be frozen in case of
    # a sudden death.
self._resume_output.write('-f\n^{}$\n'.format(re.escape(task.name)))
    # Make sure the task-freezing command line makes it to disk.
self._resume_output.flush()
os.fsync(self._resume_output.fileno())
def OnScenarioFinish(
self, scenario, final_tasks, failed_tasks, skipped_tasks):
    resume_additional_arguments = []
    for task in ListResumingTasksToFreeze(
        scenario, final_tasks, skipped_tasks):
      resume_additional_arguments.extend(
          ['-f', '^{}$'.format(re.escape(task.name))])
    self._resume_output.seek(0)
    self._resume_output.truncate()
    self._resume_output.write('\n'.join(resume_additional_arguments))
    print('# Looks like something went wrong in tasks:')
    for failed_task in failed_tasks:
      print('# {}'.format(failed_task.name))
    print('#')
    print('# To resume, append the following parameter:')
    print('# ' + FROMFILE_PREFIX_CHARS + self._resume_output.name)
def ExecuteWithCommandLine(args, default_final_tasks):
"""Helper to execute tasks using command line arguments.
Args:
    args: Command line arguments parsed with CommandLineParser().
    default_final_tasks: Default final tasks if there are no -e/--to-execute
      command line arguments.
  Returns:
    0 on success, 1 otherwise.
"""
# Builds the scenario.
final_tasks, frozen_tasks = _SelectTasksFromCommandLineRegexes(
args, default_final_tasks)
scenario = GenerateScenario(final_tasks, frozen_tasks)
if len(scenario) == 0:
logging.error('No tasks to build.')
return 1
if not os.path.isdir(args.output):
os.makedirs(args.output)
# Print the task dependency graph visualization.
if args.output_graphviz:
graphviz_path = os.path.join(args.output, _TASK_GRAPH_DOTFILE_NAME)
png_graph_path = os.path.join(args.output, _TASK_GRAPH_PNG_NAME)
with open(graphviz_path, 'w') as output:
OutputGraphViz(scenario, final_tasks, output)
subprocess.check_call(['dot', '-Tpng', graphviz_path, '-o', png_graph_path])
  # Use the built scenario.
if args.dry_run:
for task in scenario:
      print(task.name)
return 0
# Run the Scenario while saving intermediate state to be able to resume later.
failed_tasks = []
tasks_to_skip = set()
dependents_per_task = GenerateDependentSetPerTask(scenario)
def MarkTaskNotToExecute(task):
if task not in tasks_to_skip:
      logging.warning('cannot execute task: %s', task.name)
tasks_to_skip.add(task)
for dependent in dependents_per_task[task]:
MarkTaskNotToExecute(dependent)
log_filename = datetime.datetime.now().strftime(
_TASK_EXECUTION_LOG_NAME_FORMAT)
formatter = logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')
handler = logging.FileHandler(
os.path.join(args.output, log_filename), mode='a')
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
logging.info(
'%s %s', '-' * 60, common_util.GetCommandLineForLogging(sys.argv))
try:
with _ResumingFileBuilder(args) as resume_file_builder:
for task_execute_id, task in enumerate(scenario):
if task in tasks_to_skip:
continue
logging.info('%s %s', '-' * 60, task.name)
try:
task.Execute()
except (MemoryError, SyntaxError):
raise
except BaseException:
          # The resuming file being incrementally generated by
          # resume_file_builder.OnTaskSuccess() is automatically fsync()ed,
          # but resume_file_builder.OnScenarioFinish() completely rewrites
          # this file with the minimal subset of tasks to freeze. In case of
          # an ENOSPC, we don't want to touch the resuming file at all so
          # that it remains uncorrupted.
if (sys.exc_info()[0] == IOError and
sys.exc_info()[1].errno == errno.ENOSPC):
raise
logging.exception('%s %s failed', '-' * 60, task.name)
failed_tasks.append(task)
if args.keep_going and sys.exc_info()[0] != KeyboardInterrupt:
MarkTaskNotToExecute(task)
else:
tasks_to_skip.update(set(scenario[task_execute_id:]))
break
else:
resume_file_builder.OnTaskSuccess(task)
if tasks_to_skip:
assert failed_tasks
resume_file_builder.OnScenarioFinish(
scenario, final_tasks, failed_tasks, tasks_to_skip)
if sys.exc_info()[0] == KeyboardInterrupt:
raise
return 1
finally:
logging.getLogger().removeHandler(handler)
assert not failed_tasks
return 0
|
import os
def data_filename(filename: str) -> str:
"""Returns the absolute path of a file in the testdata directory.
Parameters
----------
filename : str
Relative filename
Returns
-------
str
Absolute filename
"""
return os.path.join(os.path.dirname(__file__), filename)
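if __name__ == "__main__":
    # Minimal demonstration: the resolved path is anchored at this module's
    # own directory, independent of the current working directory. The file
    # name used here is hypothetical.
    print(data_filename("example.txt"))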
|